def test_renamedFile(self):
        """
        Even if the implementation of a deprecated function is moved around on
        the filesystem, the line number in the warning emitted by
        L{deprecate.warnAboutFunction} points to a line in the implementation of
        the deprecated function.
        """
        from twisted_private_helper import module
        # Clean up the state resulting from that import; we're not going to use
        # this module, so it should go away.
        del sys.modules['twisted_private_helper']
        del sys.modules[module.__name__]

        # Rename the source directory
        self.package.moveTo(self.package.sibling(b'twisted_renamed_helper'))

        # Make sure importlib notices we've changed importable packages:
        if invalidate_caches:
            invalidate_caches()

        # Import the newly renamed version
        from twisted_renamed_helper import module
        self.addCleanup(sys.modules.pop, 'twisted_renamed_helper')
        self.addCleanup(sys.modules.pop, module.__name__)

        module.callTestFunction()
        warningsShown = self.flushWarnings([module.testFunction])
        # Check the warning count before indexing into the list: with the
        # count check last (as before), an empty result raised IndexError
        # instead of producing a clean assertion failure.
        self.assertEqual(len(warningsShown), 1)
        warnedPath = FilePath(warningsShown[0]["filename"].encode("utf-8"))
        expectedPath = self.package.sibling(
            b'twisted_renamed_helper').child(b'module.py')
        self.assertSamePath(warnedPath, expectedPath)
        self.assertEqual(warningsShown[0]["lineno"], 9)
        self.assertEqual(warningsShown[0]["message"], "A Warning String")
Example #2
0
 def test_pyc_always_writable(self):
     """A read-only source file must not leave a stale cached .pyc behind.

     Initially read-only .pyc files on Windows used to cause problems
     with later updates, see issue #6074 for details.
     """
     with _ready_to_import() as (name, path):
         # Write a Python file, make it read-only and import it
         with open(path, 'w') as f:
             f.write("x = 'original'\n")
         # Tweak the mtime of the source to ensure pyc gets updated later
         s = os.stat(path)
         os.utime(path, (s.st_atime, s.st_mtime-100000000))
         os.chmod(path, 0o400)
         m = __import__(name)
         self.assertEqual(m.x, 'original')
         # Change the file and then reimport it
         os.chmod(path, 0o600)
         with open(path, 'w') as f:
             f.write("x = 'rewritten'\n")
         unload(name)
         importlib.invalidate_caches()
         m = __import__(name)
         self.assertEqual(m.x, 'rewritten')
         # Now delete the source file and check the pyc was rewritten
         unlink(path)
         unload(name)
         importlib.invalidate_caches()
         # Move the PEP 3147 cache file to the legacy bytecode-only location
         # so the import below has to be served from the fresh .pyc.
         bytecode_only = path + "c"
         os.rename(importlib.util.cache_from_source(path), bytecode_only)
         m = __import__(name)
         self.assertEqual(m.x, 'rewritten')
Example #3
0
 def _check_package(self, depth):
     """Run a generated package's __main__ via run_module twice — once from
     source and once from a legacy (pre-PEP 3147) .pyc with the source
     removed — asserting the resulting globals both times.

     ``depth`` controls how deeply the generated package is nested (see
     ``_make_pkg``).
     """
     pkg_dir, mod_fname, mod_name = (
            self._make_pkg("x=1\n", depth, "__main__"))
     pkg_name, _, _ = mod_name.rpartition(".")
     forget(mod_name)
     try:
         if verbose: print("Running from source:", pkg_name)
         d1 = run_module(pkg_name) # Read from source
         self.assertIn("x", d1)
         self.assertTrue(d1["x"] == 1)
         del d1 # Ensure __loader__ entry doesn't keep file open
         # Import once so cached bytecode exists, then drop the source and
         # relocate the .pyc to the legacy location for the second run.
         importlib.invalidate_caches()
         __import__(mod_name)
         os.remove(mod_fname)
         make_legacy_pyc(mod_fname)
         unload(mod_name)  # In case loader caches paths
         if verbose: print("Running from compiled:", pkg_name)
         importlib.invalidate_caches()
         d2 = run_module(pkg_name) # Read from bytecode
         self.assertIn("x", d2)
         self.assertTrue(d2["x"] == 1)
         del d2 # Ensure __loader__ entry doesn't keep file open
     finally:
         self._del_pkg(pkg_dir, depth, pkg_name)
     if verbose: print("Package executed successfully")
Example #4
0
 def test_package___file__(self):
     """A package's __file__ must point into its source directory, both on
     first import (from source) and on re-import (from the cached .pyc).
     """
     # Guard: the test is meaningless if a 'pep3147' module is already
     # importable before we create ours.
     try:
         m = __import__('pep3147')
     except ImportError:
         pass
     else:
         self.fail("pep3147 module already exists: %r" % (m,))
     # Test that a package's __file__ points to the right source directory.
     os.mkdir('pep3147')
     sys.path.insert(0, os.curdir)
     def cleanup():
         # Only remove our sys.path entry if nothing else replaced it.
         if sys.path[0] == os.curdir:
             del sys.path[0]
         shutil.rmtree('pep3147')
     self.addCleanup(cleanup)
     # Touch the __init__.py file.
     support.create_empty_file('pep3147/__init__.py')
     importlib.invalidate_caches()
     expected___file__ = os.sep.join(('.', 'pep3147', '__init__.py'))
     m = __import__('pep3147')
     self.assertEqual(m.__file__, expected___file__, (m.__file__, m.__path__, sys.path, sys.path_importer_cache))
     # Ensure we load the pyc file.
     support.unload('pep3147')
     m = __import__('pep3147')
     support.unload('pep3147')
     self.assertEqual(m.__file__, expected___file__, (m.__file__, m.__path__, sys.path, sys.path_importer_cache))
Example #5
0
 def _check_package(self, depth, alter_sys=False):
     """Run a generated package's __main__ via run_module and compare the
     resulting namespace against expectations, first from source and then
     from a legacy .pyc with the source removed.

     ``depth`` controls package nesting; ``alter_sys`` forwards run_module's
     flag for mutating sys.argv/sys.modules and extends the expected
     namespace accordingly.
     """
     pkg_dir, mod_fname, mod_name = (
            self._make_pkg(example_source, depth, "__main__"))
     pkg_name = mod_name.rpartition(".")[0]
     forget(mod_name)
     expected_ns = example_namespace.copy()
     expected_ns.update({
         "__name__": mod_name,
         "__file__": mod_fname,
         "__package__": pkg_name,
     })
     if alter_sys:
         expected_ns.update({
             "run_argv0": mod_fname,
             "run_name_in_sys_modules": True,
             "module_in_sys_modules": True,
         })
     def create_ns(init_globals):
         # Factory handed to the shared checker; runs the package under test.
         return run_module(pkg_name, init_globals, alter_sys=alter_sys)
     try:
         if verbose > 1: print("Running from source:", pkg_name)
         self.check_code_execution(create_ns, expected_ns)
         # Import once to generate cached bytecode, then remove the source
         # and relocate the .pyc to the legacy location.
         importlib.invalidate_caches()
         __import__(mod_name)
         os.remove(mod_fname)
         make_legacy_pyc(mod_fname)
         unload(mod_name)  # In case loader caches paths
         if verbose > 1: print("Running from compiled:", pkg_name)
         importlib.invalidate_caches()
         self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
         self.check_code_execution(create_ns, expected_ns)
     finally:
         self._del_pkg(pkg_dir, depth, pkg_name)
     if verbose > 1: print("Package executed successfully")
Example #6
0
def memories():
    """Load python modules from the memory folder.

    Imports every importable ``.py`` module found in the ``memory``
    directory (relative to the current working directory), reloads each so
    a stale cached version is never returned, and returns the list of
    loaded module objects.  Returns an empty list when the folder is
    missing; modules that fail to import are logged and skipped.
    """
    # Invalidates current cache so newly added files are seen by importlib.
    importlib.invalidate_caches()

    # Path where the modules are stored
    memory_path = "memory"
    knowledge = list()

    # If the folder exists, get the files
    if os.path.isdir(memory_path):
        memories = os.listdir(memory_path)
    else:
        logger.warn("%s missing, i'm useless :(" % memory_path)
        return knowledge

    # For each real .py file (skipping dunder files such as __init__.py and
    # compiled .pyc files), load and reload the module.
    for memory in memories:
        # The previous substring checks (`memory.find(".py") == -1` etc.)
        # mis-handled names without ".py" (pypos == -1 truncated the last
        # character) and matched names merely containing ".py"; an explicit
        # suffix test is exact.
        if "__" in memory or not memory.endswith(".py"):
            continue
        memory_name = memory[:-3]
        try:
            module = importlib.import_module(memory_path + "." + memory_name)
            knowledge.append(importlib.reload(module))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit still propagate.
            logger.error("%s is confusing, skipping" % (memory_name))

    return knowledge
Example #7
0
    def test_module_with_large_stack(self, module='longlist'):
        """Importing a bytecode-only module containing a huge literal must
        not crash the interpreter.

        Regression test for http://bugs.python.org/issue561858.
        """
        filename = module + '.py'

        # Create a file with a list of 65000 elements.
        with open(filename, 'w') as f:
            f.write('d = [\n')
            for i in range(65000):
                f.write('"",\n')
            f.write(']')

        try:
            # Compile & remove .py file; we only need .pyc (or .pyo).
            # Bytecode must be relocated from the PEP 3147 bytecode-only location.
            py_compile.compile(filename)
        finally:
            unlink(filename)

        # Need to be able to load from current dir.
        sys.path.append('')
        importlib.invalidate_caches()

        try:
            make_legacy_pyc(filename)
            # This used to crash.
            exec('import ' + module)
        finally:
            # Cleanup.
            del sys.path[-1]
            unlink(filename + 'c')
            unlink(filename + 'o')
Example #8
0
    def test_package___cached___from_pyc(self):
        """__cached__ must point at the PEP 3147 .pyc path when a package is
        imported from cached bytecode rather than from source.

        Like test___cached__ but ensuring __cached__ when imported from a
        PEP 3147 pyc file.
        """
        def cleanup():
            rmtree("pep3147")
            unload("pep3147.foo")
            unload("pep3147")

        os.mkdir("pep3147")
        self.addCleanup(cleanup)
        # Touch the __init__.py
        with open(os.path.join("pep3147", "__init__.py"), "w"):
            pass
        with open(os.path.join("pep3147", "foo.py"), "w"):
            pass
        importlib.invalidate_caches()
        # First import compiles the .pyc files...
        m = __import__("pep3147.foo")
        unload("pep3147.foo")
        unload("pep3147")
        importlib.invalidate_caches()
        # ...and the second import is served from them.
        m = __import__("pep3147.foo")
        # NOTE(review): `imp.cache_from_source` is the deprecated spelling of
        # `importlib.util.cache_from_source` — consider migrating.
        init_pyc = imp.cache_from_source(os.path.join("pep3147", "__init__.py"))
        self.assertEqual(m.__cached__, os.path.join(os.curdir, init_pyc))
        foo_pyc = imp.cache_from_source(os.path.join("pep3147", "foo.py"))
        self.assertEqual(sys.modules["pep3147.foo"].__cached__, os.path.join(os.curdir, foo_pyc))
Example #9
0
 def reload(self):
     """Reload this middleware's module from disk and re-register handlers.

     Invalidates importlib's caches first so an updated source file is
     picked up.  Failures are logged rather than raised so a broken reload
     cannot take the host process down.
     """
     try:
         invalidate_caches()
         reload_module(self.module)
         self.register_handlers()
     except Exception as e:
         # Narrowed from a bare `except:` (which also swallowed SystemExit /
         # KeyboardInterrupt) and include the actual failure in the log,
         # which previously said only "Reload error:".
         tyk.log_error("Reload error: {0}".format(e))
Example #10
0
    def __init__(self, filepath, bundle_root_path=None):
        """Load a middleware bundle module.

        ``filepath`` is the bundle identifier (also used as the bundle id);
        ``bundle_root_path`` is the directory containing bundles.
        Initialization errors are logged via tyk, not raised.
        """
        tyk.log( "Loading module: '{0}'".format(filepath), "info")
        self.filepath = filepath
        self.handlers = {}

        self.bundle_id = filepath
        self.bundle_root_path = bundle_root_path

        self.imported_modules = []
        
        # NOTE(review): assumes filepath looks like "<api_id>_<middleware_id>"
        # (at least one '_'); a bare name would raise IndexError — confirm
        # against callers.
        module_splits = filepath.split('_')
        self.api_id, self.middleware_id = module_splits[0], module_splits[1]

        self.module_path = os.path.join(self.bundle_root_path, filepath)
        self.parse_manifest()

        self.mw_path = os.path.join(self.module_path, "middleware.py")

        # Fallback for single file bundles:
        if len(self.manifest['file_list']) == 1:
            self.mw_path = os.path.join(self.module_path, self.manifest['file_list'][0])

        try:
            # Register the custom loader on sys.meta_path so imports made by
            # the bundle resolve inside the bundle first.
            self.loader = MiddlewareLoader(self)
            sys.meta_path.append(self.loader)
            invalidate_caches()
            self.module = imp.load_source(filepath, self.mw_path)
            self.register_handlers()
            self.cleanup()
        except Exception as e:
            tyk.log_error("Middleware initialization error: {0}".format(e))
Example #11
0
def load_project_migrations(project_dir):
    """Import every migration module under *project_dir* and return the
    ``Migration`` class from each.

    Caches are invalidated and the containing packages are reloaded first so
    migration files added after the process started are picked up.
    """
    relative_paths = [
        os.path.relpath(file_path, project_dir)
        for file_path in find_project_migrations(project_dir)
    ]
    module_paths = [
        relative_path[:-3].replace(os.sep, '.')
        for relative_path in relative_paths
    ]

    # Drop stale importlib caches before touching any module.
    invalidate_caches()

    # Reload each containing package first so newly created files become
    # visible to the subsequent imports.
    base_packages = {module_path.rpartition('.')[0]
                     for module_path in module_paths}
    for base_package in base_packages:
        reload(import_module(base_package))

    migration_modules = [
        reload(import_module(module_path))
        for module_path in module_paths
    ]

    return [
        getattr(migration_module, 'Migration')
        for migration_module in migration_modules
    ]
Example #12
0
 def _possibly_invalidate_import_caches(self):
     # invalidate caches if we can (py33 and above)
     try:
         from importlib import invalidate_caches
     except ImportError:
         return
     invalidate_caches()
Example #13
0
    def _check_relative_imports(self, depth, run_name=None):
        """Run a generated module that performs explicit relative imports,
        first from source and then from a legacy .pyc, asserting that the
        sibling and uncle.cousin modules were resolved both times.
        """
        contents = r"""\
from __future__ import absolute_import
from . import sibling
from ..uncle.cousin import nephew
"""
        pkg_dir, mod_fname, mod_name = (
               self._make_pkg(contents, depth))
        try:
            self._add_relative_modules(pkg_dir, contents, depth)
            pkg_name = mod_name.rpartition('.')[0]
            if verbose: print("Running from source:", mod_name)
            d1 = run_module(mod_name, run_name=run_name) # Read from source
            self.assertIn("__package__", d1)
            self.assertTrue(d1["__package__"] == pkg_name)
            self.assertIn("sibling", d1)
            self.assertIn("nephew", d1)
            del d1 # Ensure __loader__ entry doesn't keep file open
            # Compile to bytecode, drop the source, and move the .pyc to the
            # legacy location before the second pass.
            importlib.invalidate_caches()
            __import__(mod_name)
            os.remove(mod_fname)
            make_legacy_pyc(mod_fname)
            unload(mod_name)  # In case the loader caches paths
            if verbose: print("Running from compiled:", mod_name)
            importlib.invalidate_caches()
            d2 = run_module(mod_name, run_name=run_name) # Read from bytecode
            self.assertIn("__package__", d2)
            self.assertTrue(d2["__package__"] == pkg_name)
            self.assertIn("sibling", d2)
            self.assertIn("nephew", d2)
            del d2 # Ensure __loader__ entry doesn't keep file open
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)
        if verbose: print("Module executed successfully")
Example #14
0
def command_reload(bot, data, args):
    """Drop and re-import every bot module under modules/ (re-importing the
    commands/reload plumbing first), then report the successfully reloaded
    modules back to the reply target.
    """
    command_registry.clear()

    # Snapshot currently loaded bot modules before mutating sys.modules.
    old_modules = {k for k in sys.modules if k.startswith("modules.")}
    importlib.invalidate_caches()

    clear_signals()

    for module in old_modules:
        del sys.modules[module]

    # Discover importable module files on disk, minus the two core ones
    # (which are re-imported unconditionally below).
    modules = {"modules.{}".format(i[:-3]) for i in os.listdir("modules") if i.endswith(".py") and not i.startswith("__")}
    modules -= {"modules.commands", "modules.reload"}

    importlib.import_module("modules.commands")
    importlib.import_module("modules.reload")

    successful = []

    for module in modules:
        try:
            importlib.import_module(module)
            # module[8:] strips the "modules." prefix for display.
            successful.append(module[8:])
        except SyntaxError:
            stuff_is_broken(bot, data["reply_target"], module)

    bot.say(data["reply_target"], "Reload complete. Reloaded modules {} successfully.".format(", ".join(sorted(successful))))
Example #15
0
 def setUp(self):
     """Write a trivial TESTFN-based source module into the current
     directory and put the current directory on sys.path so tests can
     import it.
     """
     self.source = TESTFN + '.py'
     self._clean()
     with open(self.source, 'w') as fp:
         print('# This is a test file written by test_import.py', file=fp)
     sys.path.insert(0, os.curdir)
     # Make the freshly written file visible to the import system.
     importlib.invalidate_caches()
Example #16
0
 def test_UNC_path(self):
     """A module on a Windows administrative share must be importable via
     its UNC path, and the imported module's __file__ must start with it.
     """
     with open(os.path.join(self.path, 'test_unc_path.py'), 'w') as f:
         f.write("testdata = 'test_unc_path'")
     importlib.invalidate_caches()
     # Create the UNC path, like \\myhost\c$\foo\bar.
     path = os.path.abspath(self.path)
     import socket
     hn = socket.gethostname()
     drive = path[0]
     unc = "\\\\%s\\%s$"%(hn, drive)
     unc += path[2:]
     try:
         os.listdir(unc)
     except OSError as e:
         if e.errno in (errno.EPERM, errno.EACCES):
             # See issue #15338
             self.skipTest("cannot access administrative share %r" % (unc,))
         # Any other failure to list the share is a genuine error.
         raise
     sys.path.insert(0, unc)
     try:
         mod = __import__("test_unc_path")
     except ImportError as e:
         self.fail("could not import 'test_unc_path' from %r: %r"
                   % (unc, e))
     self.assertEqual(mod.testdata, 'test_unc_path')
     self.assertTrue(mod.__file__.startswith(unc), mod.__file__)
     unload("test_unc_path")
Example #17
0
 def test_package___cached___from_pyc(self):
     """__cached__ must point at the PEP 3147 .pyc path when a package is
     imported from cached bytecode rather than from source.

     Like test___cached__ but ensuring __cached__ when imported from a
     PEP 3147 pyc file.
     """
     def cleanup():
         rmtree('pep3147')
         unload('pep3147.foo')
         unload('pep3147')
     os.mkdir('pep3147')
     self.addCleanup(cleanup)
     # Touch the __init__.py
     with open(os.path.join('pep3147', '__init__.py'), 'w'):
         pass
     with open(os.path.join('pep3147', 'foo.py'), 'w'):
         pass
     importlib.invalidate_caches()
     # First import generates the cached bytecode...
     m = __import__('pep3147.foo')
     unload('pep3147.foo')
     unload('pep3147')
     importlib.invalidate_caches()
     # ...and the second import is served from it.
     m = __import__('pep3147.foo')
     init_pyc = importlib.util.cache_from_source(
         os.path.join('pep3147', '__init__.py'))
     self.assertEqual(m.__cached__, os.path.join(os.curdir, init_pyc))
     foo_pyc = importlib.util.cache_from_source(os.path.join('pep3147', 'foo.py'))
     self.assertEqual(sys.modules['pep3147.foo'].__cached__,
                      os.path.join(os.curdir, foo_pyc))
Example #18
0
        def test_with_extension(ext):
            """Write a small random module as TESTFN+ext, import it, and
            verify the imported contents; cleans up module, source, and
            bytecode afterwards.

            The extension is normally ".py", perhaps ".pyw".
            """
            source = TESTFN + ext
            if is_jython:
                pyc = TESTFN + "$py.class"
            else:
                pyc = TESTFN + ".pyc"

            with open(source, "w") as f:
                print("# This tests Python's ability to import a",
                      ext, "file.", file=f)
                # Random values let the assertions prove this exact file
                # (not a stale module) was imported.
                a = random.randrange(1000)
                b = random.randrange(1000)
                print("a =", a, file=f)
                print("b =", b, file=f)

            if TESTFN in sys.modules:
                del sys.modules[TESTFN]
            importlib.invalidate_caches()
            try:
                try:
                    mod = __import__(TESTFN)
                except ImportError as err:
                    self.fail("import from %s failed: %s" % (ext, err))

                self.assertEqual(mod.a, a,
                    "module loaded (%s) but contents invalid" % mod)
                self.assertEqual(mod.b, b,
                    "module loaded (%s) but contents invalid" % mod)
            finally:
                forget(TESTFN)
                unlink(source)
                unlink(pyc)
Example #19
0
 def create_module(self, mod, contents, ext=".py"):
     """Write *contents* to TESTFN/<mod><ext>, schedule the module for
     unloading at teardown, refresh importlib's caches, and return the
     created file name.
     """
     target = os.path.join(TESTFN, mod + ext)
     with open(target, "w") as stream:
         stream.write(contents)
     self.addCleanup(unload, mod)
     importlib.invalidate_caches()
     return target
Example #20
0
 def run(self):
     """Run the simulation and put results back into Python objects.

     Renders the simulation templates, compiles them with make, loads the
     built extension modules, then launches the simulation binary in a
     subprocess and returns the connected communicator.

     Raises RuntimeError when no compartments have been added.
     """
     # Calculate the number of compartments
     if len(self._compartments) == 0:
         raise RuntimeError("No compartments found!")
     # Create & render templates for simulation-specific files
     template_dir = self._render_templates(
         {"NUM_COMPARTMENTS": len(self._compartments)})
     # Once templates are rendered, perform compilation
     # time.sleep(15)
     subprocess.check_call(
         ["make", "-C", template_dir.name, "-j1", "all"])
     # Invalidate cache and load dynamic extensions
     # TODO: Change this path to something platform-specific (autodetect)
     sys.path.append(
         os.path.join(template_dir.name, "build", "lib.linux-x86_64-3.4"))
     importlib.invalidate_caches()
     myriad_comm_mod = importlib.import_module("myriad_comm")
     # Import every dependency's extension except the base MyriadObject.
     for dependency in getattr(self, "dependencies"):
         if dependency.__name__ != "MyriadObject":
             importlib.import_module(dependency.__name__.lower())
     # Run simulation and return the communicator object back
     comm = SubprocessCommunicator(
         myriad_comm_mod, os.path.join(template_dir.name, "main.bin"))
     comm.spawn_child()
     time.sleep(0.25)  # FIXME: Change this sleep to a wait of some kind
     comm.setup_connection()
     return comm
Example #21
0
    def load_plugin(self, plugin_module):
        """Import a plugin's ``setup`` module and return it.

        ``plugin_module`` is either a name present in
        ``self.available_plugins`` (imported as ``<name>.setup``, with an
        error dialog offering retries on failure) or a filesystem path to a
        plugin directory containing ``setup.py``.

        Returns the imported setup module, or None when no module was given
        or the user gave up retrying.  Raises FileNotFoundError when a
        path-style plugin does not exist.
        """
        setup = None
        if not plugin_module:
            return
        importlib.invalidate_caches()
        if plugin_module in self.available_plugins:
            retry = True
            while retry:
                try:
                    setup = importlib.import_module(plugin_module + ".setup")
                    retry = False
                except:
                    # NOTE(review): bare except also traps KeyboardInterrupt /
                    # SystemExit — consider narrowing to Exception.
                    e = sys.exc_info()
                    error_dialog = ErrorDialog(ctrl.main)
                    error_dialog.set_error('%s, line %s\n%s: %s' % (
                        plugin_module + ".setup.py", e[2].tb_lineno, e[0].__name__, e[1]))
                    error_dialog.set_traceback(traceback.format_exc())
                    # exec_() returns truthy when the user asked to retry.
                    retry = error_dialog.exec_()
                    setup = None
        else:
            plugin_path = os.path.join(plugin_module)

            if not os.path.exists(plugin_path):
                print('Plugin not found: ', plugin_path)
                raise FileNotFoundError

            # Load setup.py directly from the plugin directory.
            module_name = os.path.basename(plugin_path)
            spec = importlib.util.spec_from_file_location(f"{module_name}.setup", os.path.join(plugin_path, "setup.py"))
            setup = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(setup)
        return setup
Example #22
0
def get_commands(dont_import = False):
    """Parse the commands cache and return the command classes it names.

    Each non-empty cache line must hold exactly two whitespace-separated
    fields (package, module).  Returns None on a missing/malformed cache or
    import failure, [] for an empty cache, the (package, module) pairs when
    ``dont_import`` is true, and otherwise the loaded command classes.
    """
    cache_raw_data = read_all(COMMANDS_CACHE)
    if cache_raw_data is None:
        return None

    # Split into lines, then into non-empty whitespace-separated fields.
    rows = []
    for line in cache_raw_data.split('\n'):
        line = line.strip()
        if not line:
            continue
        rows.append([field for field in line.split(' ') if field])

    if not rows:
        return []
    # Every row must contain exactly two fields.
    if {len(row) for row in rows} != {2}:
        return None
    pairs = [(row[0].strip(), row[1].strip()) for row in rows]

    if dont_import:
        return pairs

    importlib.invalidate_caches()
    commands = []
    try:
        for package, module_name in pairs:
            loaded = importlib.import_module("%s.%s" % (package, module_name))
            if not loaded or not hasattr(loaded, 'get_command_class'):
                return None
            commands.append(loaded.get_command_class())
    except ImportError:
        return None
    return commands
Example #23
0
    def test_makemigrations_with_custom_name(self):
        """
        Makes sure that makemigrations generate a custom migration.

        Generates an initial and then an empty migration with custom names
        and asserts on the produced files' contents.
        """
        with self.temporary_migration_module() as migration_dir:

            def cmd(migration_count, migration_name, *args):
                # Run makemigrations with the given name and return the
                # generated file's content with all spaces stripped.
                try:
                    call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args)
                except CommandError:
                    self.fail("Makemigrations errored in creating empty migration with custom name for a proper app.")
                migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name))
                # Check for existing migration file in migration folder
                self.assertTrue(os.path.exists(migration_file))
                with codecs.open(migration_file, "r", encoding="utf-8") as fp:
                    content = fp.read()
                    self.assertIn("# -*- coding: utf-8 -*-", content)
                    content = content.replace(" ", "")
                return content

            # generate an initial migration
            migration_name_0001 = "my_initial_migration"
            content = cmd("0001", migration_name_0001)
            self.assertIn("dependencies=[\n]", content)

            # Python 3.3+ importlib caches os.listdir() on some platforms like
            # Mac OS X (#23850).
            if hasattr(importlib, 'invalidate_caches'):
                importlib.invalidate_caches()

            # generate an empty migration
            migration_name_0002 = "my_custom_migration"
            content = cmd("0002", migration_name_0002, "--empty")
            self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content)
            self.assertIn("operations=[\n]", content)
Example #24
0
def resolve_name(import_name, silent=False, reload=False):
    """Imports an object based on a string.  This is useful if you want to
    use import paths as endpoints or something similar.  An import path can
    be specified either in dotted notation (``xml.sax.saxutils.escape``)
    or with a colon as object delimiter (``xml.sax.saxutils:escape``).

    If `silent` is True the return value will be `None` if the import fails.

    :param import_name: the dotted name for the object to import.
    :param silent: if set to `True` import errors are ignored and
                   `None` is returned instead.
    :param reload: if set to `True` modules that are already loaded will be
                   reloaded
    :return: imported object
    """
    # force the import name to automatically convert to strings
    import_name = bytestring(import_name)
    try:
        # Split "pkg.mod:obj" / "pkg.mod.obj" into module and object parts;
        # an already-imported dotted name is treated as a plain module.
        if ':' in import_name:
            module, obj = import_name.split(':', 1)
        elif '.' in import_name and import_name not in sys.modules:
            module, obj = import_name.rsplit('.', 1)
        else:
            module, obj = import_name, None
            # __import__ is not able to handle unicode strings in the fromlist

        mod = None
        # if the module is a package
        if reload and module in sys.modules:
            # Best-effort: cache invalidation or reload failures simply fall
            # back to the regular import path below.
            try:
                importlib.invalidate_caches()
            except Exception:
                pass
            try:
                mod = reload_module(sys.modules[module])
            except Exception:
                pass
        if not mod:
            if not obj:
                return __import__(module)
            try:
                mod = __import__(module, None, None, [obj])
            except ImportError:
                # With dotted notation the last segment may be a module, not
                # an attribute — retry importing the whole name.
                if ':' in import_name:
                    raise
                return __import__(import_name)
        if not obj:
            return mod
        try:
            return getattr(mod, obj)
        except AttributeError:
            # support importing modules not yet set up by the parent module
            # (or package for that matter)
            if ':' in import_name:
                raise
            return __import__(import_name)
    except ImportError as e:
        if not silent:
            raise_with_tb(ImportStringError(import_name, e))
Example #25
0
 def test_module(self):
     """repr() of a module with an extremely long name must still include
     the module's name and file; built-in module repr is checked too.
     """
     self._check_path_limitations(self.pkgname)
     create_empty_file(os.path.join(self.subpkgname, self.pkgname + '.py'))
     # The just-created file must be visible to the import system.
     importlib.invalidate_caches()
     from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
     module = areallylongpackageandmodulenametotestreprtruncation
     self.assertEqual(repr(module), "<module %r from %r>" % (module.__name__, module.__file__))
     self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
Example #26
0
 def setUp(self):
     """Write self.module_source into a fresh directory and put that
     directory first on sys.path, saving prior interpreter state so
     tearDown can restore it.
     """
     self.sys_path = sys.path[:]
     self.orig_module = sys.modules.pop(self.module_name, None)
     os.mkdir(self.dir_name)
     with open(self.file_name, "w") as f:
         f.write(self.module_source)
     sys.path.insert(0, self.dir_name)
     # Ensure the new directory/file are seen by the import system.
     importlib.invalidate_caches()
Example #27
0
def test_main():
    """Run all pkgutil test case classes, then reset import-related caches.

    The cache reset is necessary if the test is run repeatedly (e.g. while
    hunting reference leaks).
    """
    run_unittest(PkgutilTests, PkgutilPEP302Tests, ExtendPathTests,
                 NestedNamespacePackageTest, ImportlibMigrationTests)
    # Clear both zipimport's directory cache and importlib's finder caches.
    import importlib
    import zipimport
    zipimport._zip_directory_cache.clear()
    importlib.invalidate_caches()
Example #28
0
def ensure_current_migrations_module_is_loaded():
    """Reload the 'migrations' module so state cached by a previous test in
    the same Python process is never reused after new migrations are added.
    """
    # importlib (Python 3.3+) caches directory listings; drop them first so
    # freshly written migration files are visible.
    if sys.version_info >= (3, 3):
        from importlib import invalidate_caches
        invalidate_caches()
    reload_module(import_module('migrations'))
Example #29
0
    def __init__(self, themedir, siteconftemplate):
        """Bootstrap a site server: parse CLI arguments, load the site's
        config.json, wire up Bottle routes, optionally sandbox-run the
        site's own code, emit an Nginx site config, and serve the app over
        a Unix socket.

        ``themedir`` is the root folder containing themes;
        ``siteconftemplate`` is the path to the Nginx site template.
        NOTE(review): this constructor blocks — waitress_serve at the end
        does not return until the server stops.
        """
        self.installdir = sys.path[0] # Note: this should ideally be gotten from somewhere else.
        self.wsgiapp = Bottle()
        self.apiclass = sandbox.csg2api(self.wsgiapp, self.runningsessions)
        
        # Parse arguments
        argparser = argparse.ArgumentParser()
        argparser.add_argument('sitesfolder', metavar='<site storage folder>', help="path to the folder containing sites")
        argparser.add_argument('siteroot', metavar='<site name>', help="site name/the folder with config.json")
        self.parsedargs = argparser.parse_args()

        # Setup configuration and path to site
        self.sitepath = os.path.abspath(os.path.join(self.parsedargs.sitesfolder, self.parsedargs.siteroot))
        siteconffile = open(os.path.join(self.sitepath, "config.json"), mode="rt", encoding="utf-8")
        self.siteconf = configman.normalize_config(json.load(siteconffile), self.parsedargs.siteroot)
        siteconffile.close()
        
        # Setup theming
        themesroot = os.path.abspath(themedir)
        self.themepath = os.path.join(themesroot, self.siteconf["site"]["theme"])
        os.chdir(self.sitepath)
        
        # Assign routes (done before the site code to allow overrides)
        # This is functionally equivalent of what the language does, but makes sure Bottle will call the right instance.
        self.getrandstaticredirect = self.wsgiapp.route("/rand/<filepath:path>")(self.getrandstaticredirect)
        self.getstatic = self.wsgiapp.route("/static/<filepath:path>")(self.getstatic)
        self.compilethemesass = self.wsgiapp.route("/theme/sass/master.scss")(self.compilethemesass)
        self.getthemeasset = self.wsgiapp.route("/theme/static/<filepath:path>")(self.getthemeasset)
        self.compilesass = self.wsgiapp.route("/sass/<filename:re:.*\.scss>")(self.compilesass)
        self.catchall = self.wsgiapp.route("/")(
            self.wsgiapp.route("/<filepath:path>")(
                view(os.path.join(self.themepath, "master.tpl"))(self.catchall)
            )
        )
        self.dologin = self.wsgiapp.route("/login", method="POST")(self.dologin)
        
        # If they have code, run it
        if "additional_code" in self.siteconf["site"]:
            # Temporarily point sys.path[0] at the site so its code can
            # import site-local modules; restored right after.
            oldpath = sys.path
            sys.path[0] = self.sitepath
            importlib.invalidate_caches()
            with open(os.path.join(self.sitepath, self.siteconf["site"]["additional_code"]), mode="rt") as codefile:
                sandbox.create_box(codefile.read(), self.wsgiapp, apiclass=self.apiclass) # This file is excempt from the linking clauses in the license, allowing it to be non-(A)GPL.
            sys.path = oldpath
            importlib.invalidate_caches()

        # Configure Nginx
        socketpath = "/tmp/csg2_{}.sock".format(self.siteconf["site"]["domain_name"].replace(".", "_"))
        print("-> Generating config.")
        with open(os.path.abspath(siteconftemplate), mode="rt", encoding="utf-8") as sitetemplate:
            sitetemplatetxt = sitetemplate.read()
            newsite = sitetemplatetxt.replace("%%SERVERNAME%%", self.siteconf["site"]["domain_name"]).replace("%%SOCKETPATH%%", socketpath)
            with open("/tmp/{}.csg2nginx".format(self.siteconf["site"]["domain_name"].replace(".", "_")), mode="wt", encoding="utf-8") as newconf:
                newconf.write(newsite)
        
        # Serve site.
        print("-> Serving up site on '{}'.".format(socketpath))
        waitress_serve(self.wsgiapp, unix_socket=socketpath)
def _make_test_script(script_dir, script_basename,
                      source=test_source, omit_suffix=False):
    """Create a test script via make_script and return its path.

    When the script is named "check_sibling", an empty sibling module is
    written next to it so explicit relative imports can be exercised.
    """
    script_path = make_script(script_dir, script_basename, source, omit_suffix)
    if script_basename == "check_sibling":
        # Hack to check explicit relative imports
        make_script(script_dir, "sibling", "")
    # Make the new files visible to the import system immediately.
    importlib.invalidate_caches()
    return script_path
Example #31
0
def main():  # noqa: C901
    """Main entrypoint.

    Parses CLI arguments, obtains an API token (interactively, via the
    web UI, or from saved state), builds one TelegramClient per phone /
    saved session, handles the Heroku-internal process roles, and finally
    runs the per-client event loops until completion.
    """
    arguments = parse_arguments()
    loop = asyncio.get_event_loop()

    clients = SuperList()
    phones, authtoken = get_phones(arguments)
    api_token = get_api_token(arguments, arguments.default_app)
    proxy, conn = get_proxy(arguments)

    # The web configuration UI is optional: build it only when the web
    # modules imported successfully AND the user asked for it.
    if web_available:
        web = (core.Web(
            data_root=arguments.data_root,
            api_token=api_token,
            proxy=proxy,
            connection=conn,
            hosting=arguments.hosting,
            default_app=arguments.default_app,
        ) if arguments.web else None)
    elif arguments.heroku_web_internal:
        raise RuntimeError("Web required but unavailable")
    else:
        web = None

    save_config_key("port", arguments.port)

    # Loop until an API token exists: either the web UI supplies one, or
    # the terminal config flow writes one we can re-read.
    while api_token is None:
        if arguments.no_auth:
            return
        if web:
            loop.run_until_complete(web.start(arguments.port))
            print("Web mode ready for configuration")  # noqa: T001
            if not arguments.heroku_web_internal:
                port = str(web.port)
                # NOTE(review): a Linux host lacking /etc/os-release is
                # treated as "local", so only localhost is advertised —
                # heuristic, confirm against deployment targets.
                if platform.system(
                ) == "Linux" and not os.path.exists("/etc/os-release"):
                    print(f"Please visit http://localhost:{port}")
                else:
                    ipaddress = get("https://api.ipify.org").text
                    print(
                        f"Please visit http://{ipaddress}:{port} or http://localhost:{port}"
                    )
            loop.run_until_complete(web.wait_for_api_token_setup())
            api_token = web.api_token
        else:
            run_config({}, arguments.data_root)
            # run_config may have written new modules/config; make sure a
            # re-import sees them before re-reading the token.
            importlib.invalidate_caches()
            api_token = get_api_token(arguments)

    # Heroku-internal roles: scale dynos according to which internal
    # process variant this invocation is.
    if os.environ.get("authorization_strings", False):
        if (os.environ.get("DYNO", False) or arguments.heroku_web_internal
                or arguments.heroku_deps_internal):
            app, _ = heroku.get_app(
                os.environ["authorization_strings"],
                os.environ["heroku_api_token"],
                api_token,
                False,
                True,
            )
        if arguments.heroku_web_internal:
            app.scale_formation_process(
                "worker-DO-NOT-TURN-ON-OR-THINGS-WILL-BREAK", 0)
            signal.signal(signal.SIGTERM, functools.partial(sigterm, app))
        elif arguments.heroku_deps_internal:
            try:
                app.scale_formation_process("web", 0)
                app.scale_formation_process(
                    "worker-DO-NOT-TURN-ON-OR-THINGS-WILL-BREAK", 0)
            except requests.exceptions.HTTPError as e:
                if e.response.status_code != 404:
                    # The dynos don't exist on the very first deployment, so don't try to scale
                    raise
            else:
                atexit.register(
                    functools.partial(
                        app.scale_formation_process,
                        "restarter-DO-NOT-TURN-ON-OR-THINGS-WILL-BREAK",
                        1,
                    ))
        elif arguments.heroku_restart_internal:
            signal.signal(signal.SIGTERM, functools.partial(sigterm, app))
            # Restart role: just stay alive; SIGTERM handler does the work.
            while True:
                time.sleep(60)
        elif os.environ.get("DYNO", False):
            signal.signal(signal.SIGTERM, functools.partial(sigterm, app))

    # Recreate clients from previously saved string-session tokens.
    if authtoken:
        for phone, token in authtoken.items():
            try:
                clients += [
                    TelegramClient(
                        StringSession(token),
                        api_token.ID,
                        api_token.HASH,
                        connection=conn,
                        proxy=proxy,
                        connection_retries=None,
                    ).start()
                ]
            except ValueError:
                # Saved token is invalid: fall back to reconfiguration.
                run_config({}, arguments.data_root)
                return

            clients[-1].phone = phone  # for consistency

    # Nothing restored and no phones supplied: acquire credentials via the
    # web UI when available, otherwise prompt on stdin.
    if not clients and not phones:
        if arguments.no_auth:
            return

        if web:
            if not web.running.is_set():
                loop.run_until_complete(web.start(arguments.port))
                print("Web mode ready for configuration")  # noqa: T001
                if not arguments.heroku_web_internal:
                    port = str(web.port)
                    if platform.system(
                    ) == "Linux" and not os.path.exists("/etc/os-release"):
                        print(f"Please visit http://localhost:{port}")
                    else:
                        ipaddress = get("https://api.ipify.org").text
                        print(
                            f"Please visit http://{ipaddress}:{port} or http://localhost:{port}"
                        )
            loop.run_until_complete(web.wait_for_clients_setup())
            arguments.heroku = web.heroku_api_token
            clients = web.clients
            for client in clients:
                if arguments.heroku:
                    session = StringSession()
                else:
                    # Session filename masks most of the phone number,
                    # keeping only the last four digits.
                    session = SQLiteSession(
                        os.path.join(
                            arguments.data_root
                            or os.path.dirname(utils.get_base_dir()),
                            f"friendly-telegram-+{'X' * (len(client.phone) - 5)}{client.phone[-4:]}",
                        ))

                # Carry over DC routing info and the auth key into the new
                # session object before swapping it onto the client.
                session.set_dc(
                    client.session.dc_id,
                    client.session.server_address,
                    client.session.port,
                )
                session.auth_key = client.session.auth_key
                if not arguments.heroku:
                    session.save()
                client.session = session
        else:
            try:
                phone = input("Please enter your phone: ")
                phones = {phone.split(":", maxsplit=1)[0]: phone}
            except EOFError:
                # stdin closed: non-interactive environment (commonly a
                # mistaken Heroku deploy). Explain loudly and abort.
                print("=" * 30)
                print(
                    "Hello. If you are seeing this, it means YOU ARE DOING SOMETHING WRONG!\n"
                    "It is likely that you tried to deploy to heroku -\n"
                    "you cannot do this via the web interface.\n"
                    "\n"
                    "To deploy to heroku, go to\n"
                    "https://friendly-telegram.gitlab.io/heroku to learn more\n"
                    "\n"
                    "In addition, you seem to have forked the friendly-telegram repo. THIS IS WRONG!\n"
                    "You should remove the forked repo, and read https://friendly-telegram.gitlab.io\n"
                    "\n"
                    "If you're not using Heroku, then you are using a non-interactive prompt but\n"
                    "you have not got a session configured, meaning authentication to Telegram is\n"
                    "impossible.\n"
                    "\n"
                    "THIS ERROR IS YOUR FAULT. DO NOT REPORT IT AS A BUG!\n"
                    "Goodbye.\n")

                sys.exit(1)

    # Start one client per remaining phone number.
    for phone_id, phone in phones.items():
        if arguments.heroku:
            session = StringSession()
        else:
            # Plain path string: Telethon creates an SQLite session there.
            session = os.path.join(
                arguments.data_root or os.path.dirname(utils.get_base_dir()),
                f"friendly-telegram{(('-' + phone_id) if phone_id else '')}",
            )

        try:
            client = TelegramClient(
                session,
                api_token.ID,
                api_token.HASH,
                connection=conn,
                proxy=proxy,
                connection_retries=None,
            )

            client.start()
            client.phone = phone

            clients.append(client)
        except sqlite3.OperationalError as ex:
            print(
                f"Error initialising phone {(phone or 'unknown')} {','.join(ex.args)}\n"  # noqa
                ": this is probably your fault. Try checking that this is the only instance running and"
                "that the session is not copied. If that doesn't help, delete the file named"
                f"'friendly-telegram-{phone if phone else ''}.session'")
            continue
        except (TypeError, AuthKeyDuplicatedError):
            # Corrupt/duplicated session: delete it and retry from scratch.
            os.remove(f"{session}.session")
            main()
        except (ValueError, ApiIdInvalidError):
            # Bad API hash/ID
            run_config({}, arguments.data_root)
            return
        except PhoneNumberInvalidError:
            print(
                "Please check the phone number. Use international format (+XX...)"  # noqa: T001
                " and don't put spaces in it.")
            continue

    # Heroku deployment path: publish and (if web) wait for the redirect.
    if arguments.heroku:
        if isinstance(arguments.heroku, str):
            key = arguments.heroku
        else:
            key = input(
                "Please enter your Heroku API key (from https://dashboard.heroku.com/account): "
            ).strip()

        app = heroku.publish(clients, key, api_token)
        print(
            "Installed to heroku successfully! Type .help in Telegram for help."
        )  # noqa: T001
        if web:
            web.redirect_url = app.web_url
            web.ready.set()
            loop.run_until_complete(web.root_redirected.wait())
        return

    # Surface any stray event-loop exceptions instead of losing them.
    loop.set_exception_handler(lambda _, x: logging.error(
        "Exception on event loop! %s",
        x["message"],
        exc_info=x.get("exception", None),
    ))

    loops = [
        amain_wrapper(client, clients, web, arguments) for client in clients
    ]
    loop.run_until_complete(asyncio.gather(*loops))
Example #32
0
    def test_name_resolution(self):
        """Exercise pkgutil.resolve_name() on both dotted and colon syntax.

        Builds a table of (spec, expected object) successes and
        (spec, exception type) failures, extends it with dynamically
        created Unicode-named packages, then checks every entry.
        """
        import logging
        import logging.handlers

        # (name-spec, object it must resolve to)
        success_cases = (
            ('os', os),
            ('os.path', os.path),
            ('os.path:pathsep', os.path.pathsep),
            ('logging', logging),
            ('logging:', logging),
            ('logging.handlers', logging.handlers),
            ('logging.handlers:', logging.handlers),
            ('logging.handlers:SysLogHandler', logging.handlers.SysLogHandler),
            ('logging.handlers.SysLogHandler', logging.handlers.SysLogHandler),
            ('logging.handlers:SysLogHandler.LOG_ALERT',
                logging.handlers.SysLogHandler.LOG_ALERT),
            ('logging.handlers.SysLogHandler.LOG_ALERT',
                logging.handlers.SysLogHandler.LOG_ALERT),
            ('builtins.int', int),
            ('builtins:int', int),
            ('builtins.int.from_bytes', int.from_bytes),
            ('builtins:int.from_bytes', int.from_bytes),
            ('builtins.ZeroDivisionError', ZeroDivisionError),
            ('builtins:ZeroDivisionError', ZeroDivisionError),
            ('os:path', os.path),
        )

        # (name-spec, exception type resolve_name must raise)
        failure_cases = (
            (None, TypeError),
            (1, TypeError),
            (2.0, TypeError),
            (True, TypeError),
            ('', ValueError),
            ('?abc', ValueError),
            ('abc/foo', ValueError),
            ('foo', ImportError),
            ('os.foo', AttributeError),
            ('os.foo:', ImportError),
            ('os.pth:pathsep', ImportError),
            ('logging.handlers:NoSuchHandler', AttributeError),
            ('logging.handlers:SysLogHandler.NO_SUCH_VALUE', AttributeError),
            ('logging.handlers.SysLogHandler.NO_SUCH_VALUE', AttributeError),
            ('ZeroDivisionError', ImportError),
            ('os.path.9abc', ValueError),
            ('9abc', ValueError),
        )

        # add some Unicode package names to the mix.

        unicode_words = ('\u0935\u092e\u0938',
                         '\xe9', '\xc8',
                         '\uc548\ub155\ud558\uc138\uc694',
                         '\u3055\u3088\u306a\u3089',
                         '\u3042\u308a\u304c\u3068\u3046',
                         '\u0425\u043e\u0440\u043e\u0448\u043e',
                         '\u0441\u043f\u0430\u0441\u0438\u0431\u043e',
                         '\u73b0\u4ee3\u6c49\u8bed\u5e38\u7528\u5b57\u8868')

        for uw in unicode_words:
            d = os.path.join(self.dirname, uw)
            try:
                os.makedirs(d, exist_ok=True)
            except  UnicodeEncodeError:
                # When filesystem encoding cannot encode uw: skip this test
                continue
            # make an empty __init__.py file
            f = os.path.join(d, '__init__.py')
            with open(f, 'w') as f:
                f.write('')
                f.flush()
            # now import the package we just created; clearing the caches is
            # needed, otherwise the newly created package isn't found
            importlib.invalidate_caches()
            mod = importlib.import_module(uw)
            success_cases += (uw, mod),
            # A truncated Unicode name must fail to import.
            if len(uw) > 1:
                failure_cases += (uw[:-1], ImportError),

        # add an example with a Unicode digit at the start
        failure_cases += ('\u0966\u0935\u092e\u0938', ValueError),

        for s, expected in success_cases:
            with self.subTest(s=s):
                o = pkgutil.resolve_name(s)
                self.assertEqual(o, expected)

        for s, exc in failure_cases:
            with self.subTest(s=s):
                with self.assertRaises(exc):
                    pkgutil.resolve_name(s)
Example #33
0
def install_extensions_with_config(pip_cfg):
    """Install the pip packages described by *pip_cfg*.

    Each entry in *pip_cfg* is a mapping with a ``packages`` list and
    optional ``from-index`` / ``with-extra-index`` index URLs. The
    installed Hopic version is pinned via a constraints file so that
    installing extensions can never up/downgrade Hopic itself. After
    installation, import caches are invalidated so the new packages are
    importable in this process.
    """
    if not pip_cfg:
        return

    # virtualenv sets sys.real_prefix; venv makes base_prefix differ from
    # prefix. Outside any venv we must install with --user.
    is_venv = (hasattr(sys, 'real_prefix')
               or getattr(sys, 'base_prefix', None) != sys.prefix)

    with tempfile.TemporaryDirectory() as td:
        # Prevent changing the Hopic version
        constraints_file = os.path.join(td, 'constraints.txt')
        with open(constraints_file, 'w', encoding='UTF-8') as cf:
            cf.write(f"{PACKAGE}=={get_package_version(PACKAGE)}\n")

        base_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            '-c',
            constraints_file,
        ]

        plog = logging.getLogger(PACKAGE)
        if plog.isEnabledFor(logging.DEBUG):
            base_cmd.append('--verbose')

        if not is_venv:
            base_cmd.append('--user')

        for spec in pip_cfg:
            cmd = base_cmd.copy()

            # Fix: reuse the looked-up value instead of re-indexing the
            # spec (previously `spec['from-index']` was fetched twice).
            from_index = spec.get('from-index')
            if from_index is not None:
                cmd.extend(['--index-url', from_index])
            for index in spec['with-extra-index']:
                cmd.extend(['--extra-index-url', index])

            cmd.extend(spec['packages'])

            try:
                echo_cmd(subprocess.check_call, cmd, stdout=sys.__stderr__)
            except subprocess.CalledProcessError as exc:
                if not spec['with-extra-index']:
                    raise

                # This is the first version that fixes https://github.com/pypa/pip/issues/4195
                required_versionstr = "19.1"
                versionstr = metadata.version("pip")

                def try_int(s):
                    # Version components are compared numerically when
                    # possible, lexically otherwise.
                    try:
                        return int(s)
                    except ValueError:
                        return s

                version = tuple(try_int(x) for x in versionstr.split("."))
                required_version = tuple(
                    try_int(x) for x in required_versionstr.split("."))
                if version < required_version:
                    log.error(
                        dedent("""\
                            pip failed to install with error code %i while using an extra-index.

                            The pip version available (%s) is older than %s and may contain a bug related to usage of --extra-index-url.

                            Consider updating pip to a more recent version.
                            For example: %s -m pip install --upgrade pip
                            """),
                        exc.returncode,
                        versionstr,
                        required_versionstr,
                        sys.executable,
                    )
                raise

    # Ensure newly installed packages can be imported
    importlib.invalidate_caches()
    get_entry_points.cache_clear()
Example #34
0
 def import_module(self):
     """Import and return ``self.module``, forcing a fresh finder scan.

     When ``self.path`` is set it is forwarded to
     :func:`importlib.import_module` as the package for relative imports.
     """
     # Invalidate first so modules created after interpreter start-up are
     # visible to the import machinery.
     importlib.invalidate_caches()
     args = (self.module, self.path) if self.path else (self.module,)
     return importlib.import_module(*args)
Example #35
0
def main(infile, outfile):
    """PySpark worker loop (barrier-aware variant).

    Handshakes with the JVM over the *infile*/*outfile* pipes, sets up the
    task environment (memory limits, PYTHONPATH additions, broadcasts,
    TaskContext), runs the deserialized task function over the input
    stream, then reports metrics and accumulator updates back.
    """
    try:
        boot_time = time.time()
        split_index = read_int(infile)
        if split_index == -1:  # for unit tests
            sys.exit(-1)

        # Worker and driver must run the same minor Python version because
        # pickled objects are exchanged between them.
        version = utf8_deserializer.loads(infile)
        if version != "%d.%d" % sys.version_info[:2]:
            raise Exception(
                ("Python in worker has different version %s than that in " +
                 "driver %s, PySpark cannot run with different minor versions."
                 + "Please check environment variables PYSPARK_PYTHON and " +
                 "PYSPARK_DRIVER_PYTHON are correctly set.") %
                ("%d.%d" % sys.version_info[:2], version))

        # read inputs only for a barrier task
        isBarrier = read_bool(infile)
        boundPort = read_int(infile)
        secret = UTF8Deserializer().loads(infile)

        # set up memory limits
        memory_limit_mb = int(
            os.environ.get('PYSPARK_EXECUTOR_MEMORY_MB', "-1"))
        total_memory = resource.RLIMIT_AS
        try:
            if memory_limit_mb > 0:
                (soft_limit, hard_limit) = resource.getrlimit(total_memory)
                msg = "Current mem limits: {0} of max {1}\n".format(
                    soft_limit, hard_limit)
                print(msg, file=sys.stderr)

                # convert to bytes
                new_limit = memory_limit_mb * 1024 * 1024

                # Only ever tighten the limit, never loosen it.
                if soft_limit == resource.RLIM_INFINITY or new_limit < soft_limit:
                    msg = "Setting mem limits to {0} of max {1}\n".format(
                        new_limit, new_limit)
                    print(msg, file=sys.stderr)
                    resource.setrlimit(total_memory, (new_limit, new_limit))

        except (resource.error, OSError, ValueError) as e:
            # not all systems support resource limits, so warn instead of failing
            print("WARN: Failed to set memory limit: {0}\n".format(e),
                  file=sys.stderr)

        # initialize global state
        taskContext = None
        if isBarrier:
            taskContext = BarrierTaskContext._getOrCreate()
            BarrierTaskContext._initialize(boundPort, secret)
        else:
            taskContext = TaskContext._getOrCreate()
        # read inputs for TaskContext info
        taskContext._stageId = read_int(infile)
        taskContext._partitionId = read_int(infile)
        taskContext._attemptNumber = read_int(infile)
        taskContext._taskAttemptId = read_long(infile)
        taskContext._localProperties = dict()
        for i in range(read_int(infile)):
            k = utf8_deserializer.loads(infile)
            v = utf8_deserializer.loads(infile)
            taskContext._localProperties[k] = v

        shuffle.MemoryBytesSpilled = 0
        shuffle.DiskBytesSpilled = 0
        _accumulatorRegistry.clear()

        # fetch name of workdir
        spark_files_dir = utf8_deserializer.loads(infile)
        SparkFiles._root_directory = spark_files_dir
        SparkFiles._is_running_on_worker = True

        # fetch names of includes (*.zip and *.egg files) and construct PYTHONPATH
        add_path(
            spark_files_dir)  # *.py files that were added will be copied here
        num_python_includes = read_int(infile)
        for _ in range(num_python_includes):
            filename = utf8_deserializer.loads(infile)
            add_path(os.path.join(spark_files_dir, filename))
        if sys.version > '3':
            import importlib
            # Paths were just added: drop finder caches so the includes
            # become importable immediately.
            importlib.invalidate_caches()

        # fetch names and values of broadcast variables
        num_broadcast_variables = read_int(infile)
        for _ in range(num_broadcast_variables):
            bid = read_long(infile)
            if bid >= 0:
                path = utf8_deserializer.loads(infile)
                _broadcastRegistry[bid] = Broadcast(path=path)
            else:
                # Negative id encodes removal of broadcast -bid - 1.
                bid = -bid - 1
                _broadcastRegistry.pop(bid)

        _accumulatorRegistry.clear()
        eval_type = read_int(infile)
        if eval_type == PythonEvalType.NON_UDF:
            func, profiler, deserializer, serializer = read_command(
                pickleSer, infile)
        else:
            func, profiler, deserializer, serializer = read_udfs(
                pickleSer, infile, eval_type)

        init_time = time.time()

        def process():
            # Stream rows in, apply the task function, stream results out.
            iterator = deserializer.load_stream(infile)
            serializer.dump_stream(func(split_index, iterator), outfile)

        if profiler:
            profiler.profile(process)
        else:
            process()
    except Exception:
        try:
            write_int(SpecialLengths.PYTHON_EXCEPTION_THROWN, outfile)
            write_with_length(traceback.format_exc().encode("utf-8"), outfile)
        except IOError:
            # JVM close the socket
            pass
        except Exception:
            # Write the error to stderr if it happened while serializing
            print("PySpark worker failed with exception:", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
        sys.exit(-1)
    finish_time = time.time()
    report_times(outfile, boot_time, init_time, finish_time)
    write_long(shuffle.MemoryBytesSpilled, outfile)
    write_long(shuffle.DiskBytesSpilled, outfile)

    # Mark the beginning of the accumulators section of the output
    write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
    write_int(len(_accumulatorRegistry), outfile)
    for (aid, accum) in _accumulatorRegistry.items():
        pickleSer._write_with_length((aid, accum._value), outfile)

    # check end of stream
    if read_int(infile) == SpecialLengths.END_OF_STREAM:
        write_int(SpecialLengths.END_OF_STREAM, outfile)
    else:
        # write a different value to tell JVM to not reuse this worker
        write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
        sys.exit(-1)
Example #36
0
def _make_test_script(script_dir, script_basename, source=test_source):
    """Write a test script to disk and return its path.

    Invalidates import caches afterwards so the new file is immediately
    visible to the import system.
    """
    script_path = make_script(script_dir, script_basename, source)
    importlib.invalidate_caches()
    return script_path
Example #37
0
 def test_method_lacking(self):
     # There should be no issues if the method is not defined.
     # NullImporter has no invalidate_caches() method, so this checks that
     # importlib.invalidate_caches() skips such finders gracefully.
     key = 'gobbledeegook'
     sys.path_importer_cache[key] = imp.NullImporter('abc')
     self.addCleanup(lambda: sys.path_importer_cache.__delitem__(key))
     importlib.invalidate_caches()  # Shouldn't trigger an exception.
Example #38
0
def invalidate_import_caches():
    """Invalidate any import caches that may or may not exist."""
    # importlib may be absent (None) on very old interpreters, and
    # invalidate_caches() only exists from Python 3.3 onwards.
    invalidate = getattr(importlib, "invalidate_caches", None) if importlib else None
    if invalidate is not None:
        invalidate()
Example #39
0
def extract_wheel(fd):
    """Unpack the wheel archive *fd* into WHEEL_BASE_PATH.

    Import caches are invalidated afterwards so the extracted modules
    can be imported right away.
    """
    with zipfile.ZipFile(fd) as archive:
        archive.extractall(WHEEL_BASE_PATH)

    importlib.invalidate_caches()
Example #40
0
File: utils.py Project: xcyan/pt2pc
def get_model_module(model_def):
    """Import and return the module named by *model_def*."""
    # Refresh finder caches first so model files written after start-up
    # are found by the import machinery.
    importlib.invalidate_caches()
    module = importlib.import_module(model_def)
    return module
Example #41
0
def main(infile, outfile):
    """PySpark worker loop (pre-barrier variant).

    Handshakes with the JVM over the *infile*/*outfile* pipes, sets up
    the task environment (PYTHONPATH additions, broadcast variables),
    runs the deserialized task function over the input stream, then
    reports metrics and accumulator updates back.
    """
    try:
        boot_time = time.time()
        split_index = read_int(infile)
        if split_index == -1:  # for unit tests
            # Fix: use sys.exit() instead of the site-injected exit()
            # builtin, which is absent under `python -S` (and matches the
            # sibling worker implementation in this file).
            sys.exit(-1)

        # Worker and driver must run the same minor Python version since
        # pickled objects are exchanged between them.
        version = utf8_deserializer.loads(infile)
        if version != "%d.%d" % sys.version_info[:2]:
            raise Exception(
                ("Python in worker has different version %s than that in " +
                 "driver %s, PySpark cannot run with different minor versions")
                % ("%d.%d" % sys.version_info[:2], version))

        # initialize global state
        shuffle.MemoryBytesSpilled = 0
        shuffle.DiskBytesSpilled = 0
        _accumulatorRegistry.clear()

        # fetch name of workdir
        spark_files_dir = utf8_deserializer.loads(infile)
        SparkFiles._root_directory = spark_files_dir
        SparkFiles._is_running_on_worker = True

        # fetch names of includes (*.zip and *.egg files) and construct PYTHONPATH
        add_path(
            spark_files_dir)  # *.py files that were added will be copied here
        num_python_includes = read_int(infile)
        for _ in range(num_python_includes):
            filename = utf8_deserializer.loads(infile)
            add_path(os.path.join(spark_files_dir, filename))
        if sys.version > '3':
            import importlib
            # Paths were just added: drop finder caches so the includes
            # become importable immediately.
            importlib.invalidate_caches()

        # fetch names and values of broadcast variables
        num_broadcast_variables = read_int(infile)
        for _ in range(num_broadcast_variables):
            bid = read_long(infile)
            if bid >= 0:
                path = utf8_deserializer.loads(infile)
                _broadcastRegistry[bid] = Broadcast(path=path)
            else:
                # Negative id encodes removal of broadcast -bid - 1.
                bid = -bid - 1
                _broadcastRegistry.pop(bid)

        _accumulatorRegistry.clear()
        command = pickleSer._read_with_length(infile)
        if isinstance(command, Broadcast):
            command = pickleSer.loads(command.value)
        func, profiler, deserializer, serializer = command
        init_time = time.time()

        def process():
            # Stream rows in, apply the task function, stream results out.
            iterator = deserializer.load_stream(infile)
            serializer.dump_stream(func(split_index, iterator), outfile)

        if profiler:
            profiler.profile(process)
        else:
            process()
    except Exception:
        try:
            write_int(SpecialLengths.PYTHON_EXCEPTION_THROWN, outfile)
            write_with_length(traceback.format_exc().encode("utf-8"), outfile)
        except IOError:
            # JVM close the socket
            pass
        except Exception:
            # Write the error to stderr if it happened while serializing
            print("PySpark worker failed with exception:", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
        sys.exit(-1)
    finish_time = time.time()
    report_times(outfile, boot_time, init_time, finish_time)
    write_long(shuffle.MemoryBytesSpilled, outfile)
    write_long(shuffle.DiskBytesSpilled, outfile)

    # Mark the beginning of the accumulators section of the output
    write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
    write_int(len(_accumulatorRegistry), outfile)
    for (aid, accum) in _accumulatorRegistry.items():
        pickleSer._write_with_length((aid, accum._value), outfile)

    # check end of stream
    if read_int(infile) == SpecialLengths.END_OF_STREAM:
        write_int(SpecialLengths.END_OF_STREAM, outfile)
    else:
        # write a different value to tell JVM to not reuse this worker
        write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
        sys.exit(-1)
Example #42
0
    # Generate our vendor-specific namespace. It may make use of the standard data types (most namespaces do,
    # because the standard root namespace contains important basic types), so we include it in the lookup path set.
    # The paths are hard-coded here for the sake of conciseness.
    pyuavcan.dsdl.generate_package(
        root_namespace_directory=os.path.join(script_path, '../dsdl/namespaces/sirius_cyber_corp/'),
        lookup_directories=[os.path.join(script_path, '../public_regulated_data_types/uavcan')],
        output_directory=dsdl_generated_dir,
    )
    # Generate the standard namespace. The order actually doesn't matter.
    pyuavcan.dsdl.generate_package(
        root_namespace_directory=os.path.join(script_path, '../public_regulated_data_types/uavcan'),
        output_directory=dsdl_generated_dir,
    )
    # Okay, we can try importing again. We need to clear the import cache first because Python's import machinery
    # requires that; see the docs for importlib.invalidate_caches() for more info.
    importlib.invalidate_caches()
    import sirius_cyber_corp
    import pyuavcan.application

# Import other namespaces we're planning to use. Nested namespaces are not auto-imported, so in order to reach,
# say, "uavcan.node.Heartbeat", you have to do "import uavcan.node".
import uavcan.node                      # noqa E402
import uavcan.diagnostic                # noqa E402
import uavcan.si.sample.temperature     # noqa E402


class DemoApplication:
    def __init__(self):
        # The interface to run the demo against is selected via the environment variable with a default option provided.
        # Virtual CAN bus is supported only on GNU/Linux, but other interfaces used here should be compatible
        # with at least Windows as well.
Example #43
0
 def setUp(self):
     # Start each test from a clean slate: remove leftover files for
     # TESTFN and drop stale import-system caches so state created by a
     # previous test cannot leak into this one.
     remove_files(TESTFN)
     importlib.invalidate_caches()
Example #44
0
def reset_importlib_caches():
    """Best-effort invalidation of importlib's finder caches.

    Silently does nothing on interpreters where importlib (or its
    ``invalidate_caches`` attribute) is unavailable.
    """
    try:
        importlib.invalidate_caches()
    except (ImportError, AttributeError):
        # importlib may be a stub or predate invalidate_caches(); ignore.
        pass
Example #45
0
def main(infile, outfile):
    """Run one PySpark worker session over a JVM socket pair.

    Protocol (reads from *infile*, writes to *outfile*): version handshake,
    optional barrier-task info, memory limits, TaskContext fields and
    resources, Spark files, broadcast variables, the pickled task function,
    then the data stream.  Afterwards timing info, accumulator updates and
    an end-of-stream marker are written back.  Any exception is serialized
    to the JVM and the worker exits with -1.
    """
    try:
        boot_time = time.time()
        split_index = read_int(infile)
        if split_index == -1:  # for unit tests
            sys.exit(-1)

        version = utf8_deserializer.loads(infile)
        if version != "%d.%d" % sys.version_info[:2]:
            raise Exception((
                "Python in worker has different version %s than that in " +
                "driver %s, PySpark cannot run with different minor versions. "
                + "Please check environment variables PYSPARK_PYTHON and " +
                "PYSPARK_DRIVER_PYTHON are correctly set.") %
                            ("%d.%d" % sys.version_info[:2], version))

        # read inputs only for a barrier task
        isBarrier = read_bool(infile)
        boundPort = read_int(infile)
        secret = UTF8Deserializer().loads(infile)

        # set up memory limits
        memory_limit_mb = int(
            os.environ.get('PYSPARK_EXECUTOR_MEMORY_MB', "-1"))
        if memory_limit_mb > 0 and has_resource_module:
            total_memory = resource.RLIMIT_AS
            try:
                (soft_limit, hard_limit) = resource.getrlimit(total_memory)
                msg = "Current mem limits: {0} of max {1}\n".format(
                    soft_limit, hard_limit)
                print(msg, file=sys.stderr)

                # convert to bytes
                new_limit = memory_limit_mb * 1024 * 1024

                # Only tighten the limit; never raise it above the current soft limit.
                if soft_limit == resource.RLIM_INFINITY or new_limit < soft_limit:
                    msg = "Setting mem limits to {0} of max {1}\n".format(
                        new_limit, new_limit)
                    print(msg, file=sys.stderr)
                    resource.setrlimit(total_memory, (new_limit, new_limit))

            except (resource.error, OSError, ValueError) as e:
                # not all systems support resource limits, so warn instead of failing
                print("WARN: Failed to set memory limit: {0}\n".format(e),
                      file=sys.stderr)

        # initialize global state
        taskContext = None
        if isBarrier:
            taskContext = BarrierTaskContext._getOrCreate()
            BarrierTaskContext._initialize(boundPort, secret)
            # Set the task context instance here, so we can get it by TaskContext.get for
            # both TaskContext and BarrierTaskContext
            TaskContext._setTaskContext(taskContext)
        else:
            taskContext = TaskContext._getOrCreate()
        # read inputs for TaskContext info
        taskContext._stageId = read_int(infile)
        taskContext._partitionId = read_int(infile)
        taskContext._attemptNumber = read_int(infile)
        taskContext._taskAttemptId = read_long(infile)
        taskContext._resources = {}
        for r in range(read_int(infile)):
            key = utf8_deserializer.loads(infile)
            name = utf8_deserializer.loads(infile)
            addresses = []
            # BUG FIX: the resources dict used to be re-initialized here on
            # every iteration, which discarded all but the last resource read.
            for a in range(read_int(infile)):
                addresses.append(utf8_deserializer.loads(infile))
            taskContext._resources[key] = ResourceInformation(name, addresses)

        taskContext._localProperties = dict()
        for i in range(read_int(infile)):
            k = utf8_deserializer.loads(infile)
            v = utf8_deserializer.loads(infile)
            taskContext._localProperties[k] = v

        shuffle.MemoryBytesSpilled = 0
        shuffle.DiskBytesSpilled = 0
        _accumulatorRegistry.clear()

        # fetch name of workdir
        spark_files_dir = utf8_deserializer.loads(infile)
        SparkFiles._root_directory = spark_files_dir
        SparkFiles._is_running_on_worker = True

        # fetch names of includes (*.zip and *.egg files) and construct PYTHONPATH
        add_path(
            spark_files_dir)  # *.py files that were added will be copied here
        num_python_includes = read_int(infile)
        for _ in range(num_python_includes):
            filename = utf8_deserializer.loads(infile)
            add_path(os.path.join(spark_files_dir, filename))

        # New files were placed on sys.path; flush importlib's finder caches
        # so the import machinery can see them.
        importlib.invalidate_caches()

        # fetch names and values of broadcast variables
        needs_broadcast_decryption_server = read_bool(infile)
        num_broadcast_variables = read_int(infile)
        if needs_broadcast_decryption_server:
            # read the decrypted data from a server in the jvm
            port = read_int(infile)
            auth_secret = utf8_deserializer.loads(infile)
            (broadcast_sock_file,
             _) = local_connect_and_auth(port, auth_secret)

        for _ in range(num_broadcast_variables):
            bid = read_long(infile)
            if bid >= 0:
                if needs_broadcast_decryption_server:
                    read_bid = read_long(broadcast_sock_file)
                    assert (read_bid == bid)
                    _broadcastRegistry[bid] = \
                        Broadcast(sock_file=broadcast_sock_file)
                else:
                    path = utf8_deserializer.loads(infile)
                    _broadcastRegistry[bid] = Broadcast(path=path)

            else:
                # Negative ids encode removals: -bid - 1 is the id to drop.
                bid = -bid - 1
                _broadcastRegistry.pop(bid)

        if needs_broadcast_decryption_server:
            broadcast_sock_file.write(b'1')
            broadcast_sock_file.close()

        _accumulatorRegistry.clear()
        eval_type = read_int(infile)
        if eval_type == PythonEvalType.NON_UDF:
            func, profiler, deserializer, serializer = read_command(
                pickleSer, infile)
        else:
            func, profiler, deserializer, serializer = read_udfs(
                pickleSer, infile, eval_type)

        init_time = time.time()

        def process():
            # Stream-deserialize input, apply the task function, and
            # stream-serialize the output back to the JVM.
            iterator = deserializer.load_stream(infile)
            out_iter = func(split_index, iterator)
            try:
                serializer.dump_stream(out_iter, outfile)
            finally:
                if hasattr(out_iter, 'close'):
                    out_iter.close()

        if profiler:
            profiler.profile(process)
        else:
            process()

        # Reset task context to None. This is a guard code to avoid residual context when worker
        # reuse.
        TaskContext._setTaskContext(None)
        BarrierTaskContext._setTaskContext(None)
    except BaseException as e:
        try:
            exc_info = None
            # NOTE: any non-empty value (even "false") enables simplification here.
            if os.environ.get("SPARK_SIMPLIFIED_TRACEBACK", False):
                tb = try_simplify_traceback(sys.exc_info()[-1])
                if tb is not None:
                    e.__cause__ = None
                    exc_info = "".join(
                        traceback.format_exception(type(e), e, tb))
            if exc_info is None:
                exc_info = traceback.format_exc()

            write_int(SpecialLengths.PYTHON_EXCEPTION_THROWN, outfile)
            write_with_length(exc_info.encode("utf-8"), outfile)
        except IOError:
            # JVM close the socket
            pass
        except BaseException:
            # Write the error to stderr if it happened while serializing
            print("PySpark worker failed with exception:", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
        sys.exit(-1)
    finish_time = time.time()
    report_times(outfile, boot_time, init_time, finish_time)
    write_long(shuffle.MemoryBytesSpilled, outfile)
    write_long(shuffle.DiskBytesSpilled, outfile)

    # Mark the beginning of the accumulators section of the output
    write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
    write_int(len(_accumulatorRegistry), outfile)
    for (aid, accum) in _accumulatorRegistry.items():
        pickleSer._write_with_length((aid, accum._value), outfile)

    # check end of stream
    if read_int(infile) == SpecialLengths.END_OF_STREAM:
        write_int(SpecialLengths.END_OF_STREAM, outfile)
    else:
        # write a different value to tell JVM to not reuse this worker
        write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
        sys.exit(-1)
def GetModule(tlib):
    """Create a module wrapping a COM typelibrary on demand.

    'tlib' must be an ITypeLib COM pointer instance, the pathname of a
    type library, a COM CLSID GUID, or a tuple/list specifying the
    arguments to a comtypes.typeinfo.LoadRegTypeLib call:

      (libid, wMajorVerNum, wMinorVerNum, lcid=0)

    Or it can be an object with _reg_libid_ and _reg_version_
    attributes.

    A relative pathname is interpreted as relative to the callers
    __file__, if this exists.

    This function determines the module name from the typelib
    attributes, then tries to import it.  If that fails because the
    module doesn't exist, the module is generated into the
    comtypes.gen package.

    It is possible to delete the whole comtypes gen directory to
    remove all generated modules, the directory and the __init__.py
    file in it will be recreated when needed.

    If comtypes.gen __path__ is not a directory (in a frozen
    executable it lives in a zip archive), generated modules are only
    created in memory without writing them to the file system.

    Example:

        GetModule("shdocvw.dll")

    would create modules named

       comtypes.gen._EAB22AC0_30C1_11CF_A7EB_0000C05BAE0B_0_1_1
       comtypes.gen.SHDocVw

    containing the Python wrapper code for the type library used by
    Internet Explorer.  The former module contains all the code, the
    latter is a short stub loading the former.
    """
    pathname = None
    if isinstance(tlib, str):
        # pathname of type library
        if not os.path.isabs(tlib):
            # If a relative pathname is used, we try to interpret
            # this pathname as relative to the callers __file__.
            frame = sys._getframe(1)
            _file_ = frame.f_globals.get("__file__", None)
            if _file_ is not None:
                directory = os.path.dirname(os.path.abspath(_file_))
                abspath = os.path.normpath(os.path.join(directory, tlib))
                # If the file does exist, we use it.  Otherwise it may
                # still be that the file is on Windows search path for
                # typelibs, and we leave the pathname alone.
                if os.path.isfile(abspath):
                    tlib = abspath
        logger.debug("GetModule(%s)", tlib)
        pathname = tlib
        tlib = comtypes.typeinfo.LoadTypeLibEx(tlib)
    elif isinstance(tlib, comtypes.GUID):
        # tlib contain a clsid
        clsid = str(tlib)

        # lookup associated typelib in registry
        import winreg
        with winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\TypeLib" % clsid, 0, winreg.KEY_READ) as key:
            typelib = winreg.EnumValue(key, 0)[1]
        with winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\Version" % clsid, 0, winreg.KEY_READ) as key:
            version = winreg.EnumValue(key, 0)[1].split(".")

        logger.debug("GetModule(%s)", typelib)
        tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(typelib), int(version[0]), int(version[1]), 0)
    elif isinstance(tlib, (tuple, list)):
        # sequence containing libid and version numbers
        logger.debug("GetModule(%s)", (tlib,))
        tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(tlib[0]), *tlib[1:])
    elif hasattr(tlib, "_reg_libid_"):
        # a COMObject implementation
        logger.debug("GetModule(%s)", tlib)
        tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(tlib._reg_libid_),
                                                *tlib._reg_version_)
    else:
        # an ITypeLib pointer
        logger.debug("GetModule(%s)", tlib.GetLibAttr())

    # create and import the module
    mod = _CreateWrapper(tlib, pathname)
    try:
        modulename = tlib.GetDocumentation(-1)[0]
    except comtypes.COMError:
        return mod
    if modulename is None:
        return mod
    # Legacy Python 2 compatibility branch (dead on Python 3).
    if sys.version_info < (3, 0):
        modulename = modulename.encode("mbcs")

    # create and import the friendly-named module
    try:
        mod = _my_import("comtypes.gen." + modulename)
    except Exception as details:
        logger.info("Could not import comtypes.gen.%s: %s", modulename, details)
    else:
        return mod
    # the module is always regenerated if the import fails
    logger.info("# Generating comtypes.gen.%s", modulename)
    # determine the Python module name
    fullname = _name_module(tlib)
    modname = fullname.split(".")[-1]
    # The friendly module is a thin stub re-exporting the GUID-named module.
    code = "from comtypes.gen import %s\nglobals().update(%s.__dict__)\n" % (modname, modname)
    code += "__name__ = 'comtypes.gen.%s'" % modulename
    if comtypes.client.gen_dir is None:
        # Frozen / zip-archive case: build the module purely in memory.
        mod = types.ModuleType("comtypes.gen." + modulename)
        mod.__file__ = os.path.join(os.path.abspath(comtypes.gen.__path__[0]),
                                    "<memory>")
        exec(code, mod.__dict__)
        sys.modules["comtypes.gen." + modulename] = mod
        setattr(comtypes.gen, modulename, mod)
        return mod
    # create in file system, and import it.  Use a context manager so the
    # handle is closed even if the write fails.
    with open(os.path.join(comtypes.client.gen_dir, modulename + ".py"), "w") as ofi:
        ofi.write(code)
    # clear the import cache to make sure Python sees newly created modules
    if hasattr(importlib, "invalidate_caches"):
        importlib.invalidate_caches()
    return _my_import("comtypes.gen." + modulename)
Example #47
0
def _make_test_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
                       source=test_source, depth=1):
    """Build a zipped test package, then flush importlib's finder caches.

    The cache flush ensures the import machinery can see the archive that
    was just written to disk.  Returns whatever make_zip_pkg returns.
    """
    result = make_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
                          source, depth)
    importlib.invalidate_caches()
    return result
Example #48
0
def _import_modules(modules):
    if modules:
        invalidate_caches()
        return [import_module(x) for x in modules]
    return []
Example #49
0
    def __init__(self, name, yc_soln, local_grids=None):
        """
        Write out a YASK kernel, compile it using the YASK's Makefiles,
        import the corresponding SWIG-generated Python module, and finally
        create a YASK kernel solution object.

        Parameters
        ----------
        name : str
            Unique name of this YaskKernel.
        yc_soln
            The YaskCompiler solution.
        local_grids : list of Array, optional
            A local grid is necessary to run the YaskKernel, but it can be
            deallocated upon returning to Python-land.  For example, local
            grids could be used to implement the temporary arrays introduced by
            the DSE.  This parameter tells which of the ``yc_soln``'s grids are
            local.
        """
        self.name = name

        # Shared object name
        self.soname = "%s.devito.%s" % (name, configuration['platform'])

        # If the generated Python module already exists on disk, a previous
        # session compiled this exact solution and we can import it directly.
        if os.path.exists(os.path.join(namespace['yask-pylib'], '%s.py' % name)):
            # Nothing to do -- the YASK solution was compiled in a previous session
            yk = import_module(name)
            debug("cache hit, `%s` imported w/o jitting" % name)
        else:
            # We create and JIT compile a fresh YASK solution

            # The lock manager prevents race conditions
            # `lock_m` is used only to keep the lock manager alive
            # NOTE(review): catch_warnings presumably suppresses warnings
            # emitted while acquiring the cache lock -- confirm intent.
            with warnings.catch_warnings():
                cleanup_m = CleanupManager()
                lock_m = CacheLockManager(cleanup_m, namespace['yask-output-dir'])  # noqa

            # The directory in which the YASK-generated code (.hpp) will be placed
            yk_codegen = namespace['yask-codegen'](name, 'devito',
                                                   configuration['platform'])
            if not os.path.exists(yk_codegen):
                os.makedirs(yk_codegen)

            # Write out the stencil file
            yk_codegen_file = os.path.join(yk_codegen, namespace['yask-codegen-file'])
            yc_soln.format(configuration['platform'].isa,
                           ofac.new_file_output(yk_codegen_file))

            # JIT-compile it
            compiler = configuration.yask['compiler']
            # In develop-mode, trade optimization level for compile speed --
            # except for non-trivial kernels, which still get -O1.
            if configuration['develop-mode']:
                if yc_soln.get_num_equations() == 0:
                    # YASK will compile more quickly, and no price has to be paid
                    # in terms of performance, as this is a void kernel
                    opt_level = 0
                else:
                    opt_level = 1
            else:
                opt_level = 3
            args = [
                '-j', 'YK_CXX=%s' % compiler.cc, 'YK_CXXOPT=-O%d' % opt_level,
                # No MPI support at the moment
                'mpi=0',
                # To locate the YASK compiler
                'YC_EXEC=%s' % os.path.join(namespace['path'], 'bin'),
                # Error out if a grid not explicitly defined in the compiler is created
                'allow_new_grid_types=0',
                # To give a unique name to the generated Python modules, rather
                # than creating `yask_kernel.py`
                'YK_BASE=%s' % name,
                # `stencil` and `arch` should always be provided
                'stencil=%s' % 'devito', 'arch=%s' % configuration['platform'],
                # The root directory of generated code files, shared libs, Python modules
                'YASK_OUTPUT_DIR=%s' % namespace['yask-output-dir'],
                # Pick the YASK kernel Makefile, i.e. the one under `yask/src/kernel`
                '-C', namespace['kernel-path'],
                # Make target
                'api'
            ]
            if configuration['develop-mode']:
                args.append('check=1')   # Activate internal YASK asserts
                args.append('trace=1')   # Print out verbose progress msgs w/-trace knob
                args.append('trace_mem=0')   # Print out verbose mem-access msgs
            make(namespace['path'], args)

            # Now we must be able to import the SWIG-generated Python module
            # (invalidate_caches is required because the module was created
            # after the interpreter started)
            invalidate_caches()
            yk = import_module(name)

            # Release the lock manager
            cleanup_m.clean_up()

        # Create the YASK solution object
        kfac = yk.yk_factory()
        self.env = kfac.new_env()
        self.soln = kfac.new_solution(self.env)

        # Allow step indices to wrap-around
        self.soln.set_step_wrap(True)

        # Apply any user-provided options, if any.
        # These are applied here instead of just before prepare_solution()
        # so that applicable options will apply to all API calls
        self.soln.apply_command_line_options(configuration.yask['options'] or '')

        # MPI setup: simple rank configuration in 1st dim only.
        # TODO: in production runs, the ranks would be distributed along all
        # domain dimensions
        # NOTE(review): `self.space_dimensions` is presumably a property
        # defined elsewhere on this class -- confirm it is usable here.
        self.soln.set_num_ranks(self.space_dimensions[0], self.env.get_num_ranks())

        # Redirect stdout to a string or file
        if configuration.yask['dump']:
            filename = 'yk_dump.%s.%s.%s.txt' % (name, configuration['platform'],
                                                 configuration['platform'].isa)
            filename = os.path.join(configuration.yask['dump'], filename)
            self.output = yk.yask_output_factory().new_file_output(filename)
        else:
            self.output = yk.yask_output_factory().new_string_output()
        self.soln.set_debug_output(self.output)

        # Users may want to run the same Operator (same domain etc.) with
        # different grids
        self.grids = {i.get_name(): i for i in self.soln.get_grids()}
        self.local_grids = {i.name: self.grids[i.name] for i in (local_grids or [])}
Example #50
0
    def makemigrations_and_migrate(self):
        """Run Django's makemigrations and migrate for all installed apps.

        Generates migrations into TMP_MIGRATIONS_DIR-scoped modules, then
        applies them, translating the common failure modes into friendly
        advice messages before exiting.
        """

        # only get apps with labels, otherwise migrate will raise an error
        # when it tries to migrate that app but no migrations dir was created
        app_labels = set(model._meta.app_config.label
                         for model in apps.get_models())

        # Point every app's migrations at a temp-dir-backed module so the
        # generated files stay out of the project source tree.
        migrations_modules = {
            app_label: '{}.{}'.format(TMP_MIGRATIONS_DIR, app_label)
            for app_label in app_labels
        }
        settings.MIGRATION_MODULES = migrations_modules

        start = time.time()

        try:
            # makemigrations rarely sends any interesting info to stdout.
            # if there is an error, it will go to stdout,
            # or raise CommandError.
            # if someone needs to see the details of makemigrations,
            # they can do "otree makemigrations".
            with patch('sys.stdout.write'):
                call_command('makemigrations', '--noinput',
                             *migrations_modules.keys())
        except SystemExit as exc:
            # SystemExit will be raised if NonInteractiveMigrationQuestioner
            # cannot decide what to do automatically.
            # SystemExit does not inherit from Exception,
            # so we need to catch it explicitly.
            # without this, the process will just exit and the autoreloader
            # will hang.
            self.print_error_and_exit(ADVICE_FIX_NOT_NULL_FIELD)
        except Exception as exc:
            # Deliberately broad: any makemigrations failure gets the same
            # "delete the tmp dir" advice (the bound `exc` is unused).
            self.print_error_and_exit(ADVICE_DELETE_TMP)

        # migrate imports some modules that were created on the fly,
        # so according to the docs for import_module, we need to call
        # invalidate_cache.
        # the following line is necessary to avoid a crash I experienced
        # on Mac, because makemigrations tries some imports which cause ImportErrors,
        # messes up the cache on some systems.
        importlib.invalidate_caches()

        try:
            # see above comment about makemigrations and capturing stdout.
            # it applies to migrate command also.
            with patch('sys.stdout.write'):
                # call_command does not add much overhead (0.1 seconds typical)
                call_command('migrate', '--noinput')
        except Exception as exc:
            # it seems there are different exceptions all named
            # OperationalError (django.db.OperationalError,
            # sqlite.OperationalError, mysql....)
            # so, simplest to use the string name

            if type(exc).__name__ in (
                    'OperationalError',
                    'ProgrammingError',
                    'InconsistentMigrationHistory',
            ):
                self.print_error_and_exit(ADVICE_DELETE_DB)
            else:
                raise

        # Report timing only when migration was noticeably slow.
        total_time = round(time.time() - start, 1)
        if total_time > 5:
            self.stdout.write(
                'makemigrations & migrate ran in {}s'.format(total_time))
Example #51
0
def dynamic_import(module_name):
    """Import and return the module at dotted path *module_name*.

    Finder caches are flushed first so recently created files are visible;
    on very old Pythons without invalidate_caches() this step is skipped.
    """
    if hasattr(importlib, "invalidate_caches"):
        importlib.invalidate_caches()
    return importlib.import_module(module_name)
Example #52
0
    def _import(self, name):
        """
        Imports the main module for one model.

        @param      name        model name
        @return                 imported module
        @raise FileNotFoundError    if the model's main script is missing
        @raise ImportError          if the module cannot be imported, even
                                    after reloading its parent packages, or
                                    if it lacks a 'restapi_load' function
        """
        meta = self.get_metadata(name)
        loc = self.get_full_name(name)
        script = os.path.join(loc, meta['main_script'])
        if not os.path.exists(script):
            raise FileNotFoundError(
                "Unable to find script '{0}'".format(script))

        fold, modname = os.path.split(script)
        sys.path.insert(0, self._folder)
        full_modname = ".".join(
            [name.replace("/", "."),
             os.path.splitext(modname)[0]])

        def import_module():
            # Closure reads `name`, `meta`, ... late-bound from the enclosing
            # scope for its error message, so those names must not be
            # clobbered before a retry.
            try:
                mod = importlib.import_module(full_modname)
            except (ImportError, ModuleNotFoundError) as e:
                with open(script, "r") as f:
                    code = f.read()
                values = dict(self_folder=self._folder,
                              name=name,
                              meta=str(meta),
                              loc=loc,
                              script=script,
                              fold=fold,
                              modname=modname,
                              full_modname=full_modname)
                values = '\n'.join('{}={}'.format(k, v)
                                   for k, v in values.items())
                raise ImportError(
                    "Unable to compile file '{0}'\ndue to {1}\n{2}\n---\n{3}".
                    format(script, e, code, values)) from e
            return mod

        try:
            mod = import_module()
        except ImportError:
            # First attempt failed: drop and reload every parent package of
            # the target module, then retry once.
            specs = []
            spl = full_modname.split('.')
            for i in range(len(spl) - 1):
                # BUG FIX: a dedicated variable is used here; the original
                # reused `name`, clobbering the method parameter that
                # import_module() reads for its error message on retry.
                parent = '.'.join(spl[:i + 1])
                if parent in sys.modules:
                    del sys.modules[parent]
                importlib.invalidate_caches()
                spec = importlib.util.find_spec(parent)
                specs.append((parent, spec))
                mod = importlib.import_module(parent)
                importlib.reload(mod)
            try:
                mod = import_module()
            except ImportError as e:
                del sys.path[0]
                mes = "\n".join("{0}: {1}".format(a, b) for a, b in specs)
                raise ImportError(
                    "Unable to import module '{0}', specs=\n{1}".format(
                        full_modname, mes)) from e

        del sys.path[0]

        if not hasattr(mod, "restapi_load"):
            raise ImportError(
                "Unable to find function 'restapi_load' in module '{0}'".
                format(mod.__name__))
        return mod
Example #53
0
def reload(module):
    """Invalidate importlib's finder caches, then re-execute *module* in place.

    Returns the reloaded module object (``importlib.reload`` returns the same
    module, re-initialized) so callers can chain on the result; previously the
    return value was silently discarded.
    """
    importlib.invalidate_caches()
    return importlib.reload(module)
Example #54
0
    async def plugins_add(self, ctx, *, plugin_name: str):
        """
        Install a new plugin for the bot.

        `plugin_name` can be the name of the plugin found in `{prefix}plugin registry`,
        or a direct reference to a GitHub hosted plugin (in the format `user/repo/name[@branch]`)
        or `local/name` for local plugins.
        """

        # Resolve the user's input into a plugin object; parse_user_input
        # reports its own errors, so a None result means "already handled".
        plugin = await self.parse_user_input(ctx,
                                             plugin_name,
                                             check_version=True)
        if plugin is None:
            return

        # Refuse duplicates: same plugin already configured...
        if str(plugin) in self.bot.config["plugins"]:
            embed = discord.Embed(
                description="This plugin is already installed.",
                color=self.bot.error_color)
            return await ctx.send(embed=embed)

        if plugin.name in self.bot.cogs:
            # another class with the same name
            embed = discord.Embed(
                description="Cannot install this plugin (dupe cog name).",
                color=self.bot.error_color,
            )
            return await ctx.send(embed=embed)

        # Progress message differs for local vs. remote plugins.
        if plugin.local:
            embed = discord.Embed(
                description=
                f"Starting to load local plugin from {plugin.link}...",
                color=self.bot.main_color,
            )
        else:
            embed = discord.Embed(
                description=
                f"Starting to download plugin from {plugin.link}...",
                color=self.bot.main_color,
            )
        msg = await ctx.send(embed=embed)

        try:
            await self.download_plugin(plugin, force=True)
        except Exception as e:
            logger.warning("Unable to download plugin %s.",
                           plugin,
                           exc_info=True)

            embed = discord.Embed(
                description=
                f"Failed to download plugin, check logs for error.\n{type(e)}: {e}",
                color=self.bot.error_color,
            )

            return await msg.edit(embed=embed)

        # Only record the plugin in config once the download succeeded.
        self.bot.config["plugins"].append(str(plugin))
        await self.bot.config.update()

        if self.bot.config.get("enable_plugins"):

            # The plugin's files were just written to disk; flush finder
            # caches so the import machinery can see them.
            invalidate_caches()

            try:
                await self.load_plugin(plugin)
            except Exception as e:
                logger.warning("Unable to load plugin %s.",
                               plugin,
                               exc_info=True)

                # NOTE(review): this embed says "download" but the failure
                # was during load -- looks like copy-paste; confirm wording.
                embed = discord.Embed(
                    description=
                    f"Failed to download plugin, check logs for error.\n{type(e)}: {e}",
                    color=self.bot.error_color,
                )

            else:
                embed = discord.Embed(
                    description="Successfully installed plugin.\n"
                    "*Friendly reminder, plugins have absolute control over your bot. "
                    "Please only install plugins from developers you trust.*",
                    color=self.bot.main_color,
                )
        else:
            embed = discord.Embed(
                description="Successfully installed plugin.\n"
                "*Friendly reminder, plugins have absolute control over your bot. "
                "Please only install plugins from developers you trust.*\n\n"
                "This plugin is currently not enabled due to `ENABLE_PLUGINS=false`, "
                "to re-enable plugins, remove or change `ENABLE_PLUGINS=true` and restart your bot.",
                color=self.bot.main_color,
            )
        return await msg.edit(embed=embed)
Example #55
0
def run_commands(cmds: List[str]) -> None:
    """Run the given shell commands as one newline-joined script.

    Does nothing for an empty list. Raises CalledProcessError on a
    non-zero exit (check=True). After any command mentioning "pip",
    refreshes importlib's finder caches so newly installed packages
    become importable.
    """
    if not cmds:
        return
    script = "\n".join(cmds)
    subprocess.run(args=script, check=True, shell=True)
    if "pip" in script:
        # A pip invocation may have installed new packages; make sure the
        # import machinery can see them without a restart.
        importlib.invalidate_caches()
Example #56
0
 def loadmodule(self, modulename):
     """Freshly (re)load and return the module named *modulename*.

     Invalidates importlib's finder caches first so modules created on
     disk after interpreter start are found, then imports and reloads to
     pick up any source changes.
     """
     importlib.invalidate_caches()
     loaded = importlib.reload(importlib.import_module(modulename))
     return loaded
Example #57
0
    def inline_run(self, *args, plugins=(), no_reraise_ctrlc=False):
        """Run ``pytest.main()`` in-process, returning a HookRecorder.

        Runs the :py:func:`pytest.main` function to run all of pytest inside
        the tests process itself.  This means it can return a
        :py:class:`HookRecorder` instance which gives more detailed results
        from that run than can be done by matching stdout/stderr from
        :py:meth:`runpytest`.

        :param args: command line arguments to pass to :py:func:`pytest.main`

        :kwarg plugins: extra plugin instances the ``pytest.main()`` instance should use.

        :kwarg no_reraise_ctrlc: typically we reraise keyboard interrupts from the child run. If
            True, the KeyboardInterrupt exception is captured.

        :return: a :py:class:`HookRecorder` instance
        """
        # (maybe a cpython bug?) the importlib cache sometimes isn't updated
        # properly between file creation and inline_run (especially if imports
        # are interspersed with file creation)
        importlib.invalidate_caches()

        plugins = list(plugins)
        # Cleanup callbacks; all of them run in the ``finally`` below, in
        # registration order, whether the inner run succeeds or raises.
        finalizers = []
        try:
            # Do not load user config (during runs only).
            mp_run = MonkeyPatch()
            for k, v in self._env_run_update.items():
                mp_run.setenv(k, v)
            finalizers.append(mp_run.undo)

            # Any sys.module or sys.path changes done while running pytest
            # inline should be reverted after the tests run completes to avoid
            # clashing with later inline tests run within the same pytest tests,
            # e.g. just because they use matching tests module names.
            finalizers.append(self.__take_sys_modules_snapshot().restore)
            finalizers.append(SysPathsSnapshot().restore)

            # Important note:
            # - our tests should not leave any other references/registrations
            #   laying around other than possibly loaded tests modules
            #   referenced from sys.modules, as nothing will clean those up
            #   automatically

            rec = []

            # Tiny inline plugin whose only job is to capture a HookRecorder
            # once the inner run's config (and its pluginmanager) exists.
            # ``x`` plays the role of the plugin's own ``self``; the closure
            # deliberately uses the outer ``self``/``rec``.
            class Collect:
                def pytest_configure(x, config):
                    rec.append(self.make_hook_recorder(config.pluginmanager))

            plugins.append(Collect())
            ret = pytest.main(list(args), plugins=plugins)
            if len(rec) == 1:
                reprec = rec.pop()
            else:
                # pytest_configure never fired (e.g. a usage error aborted
                # startup), so there is no recorder; use an empty stand-in
                # that still carries ``ret``.

                class reprec:
                    pass

            reprec.ret = ret

            # typically we reraise keyboard interrupts from the child run
            # because it's our user requesting interruption of the testing
            if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc:
                calls = reprec.getcalls("pytest_keyboard_interrupt")
                if calls and calls[-1].excinfo.type == KeyboardInterrupt:
                    raise KeyboardInterrupt()
            return reprec
        finally:
            for finalizer in finalizers:
                finalizer()
Example #58
0
    def __init__(self, themedir, siteconftemplate):
        """Bootstrap and serve one site.

        Parses CLI arguments, loads the site's ``config.json``, wires up the
        Bottle routes, optionally executes sandboxed site code, writes an
        Nginx site config to /tmp, and finally blocks serving the WSGI app
        over a unix socket.

        :param themedir: root folder containing theme subfolders.
        :param siteconftemplate: path to the Nginx site config template file.
        """
        self.installdir = sys.path[
            0]  # Note: this should ideally be gotten from somewhere else.
        self.wsgiapp = Bottle()
        # NOTE(review): self.runningsessions is read here but never assigned
        # in this method — presumably a class attribute; confirm it exists.
        self.apiclass = sandbox.csg2api(self.wsgiapp, self.runningsessions)

        # Parse arguments
        argparser = argparse.ArgumentParser()
        argparser.add_argument('sitesfolder',
                               metavar='<site storage folder>',
                               help="path to the folder containing sites")
        argparser.add_argument('siteroot',
                               metavar='<site name>',
                               help="site name/the folder with config.json")
        self.parsedargs = argparser.parse_args()

        # Setup configuration and path to site
        self.sitepath = os.path.abspath(
            os.path.join(self.parsedargs.sitesfolder,
                         self.parsedargs.siteroot))
        siteconffile = open(os.path.join(self.sitepath, "config.json"),
                            mode="rt",
                            encoding="utf-8")
        self.siteconf = configman.normalize_config(json.load(siteconffile),
                                                   self.parsedargs.siteroot)
        siteconffile.close()

        # Setup theming
        themesroot = os.path.abspath(themedir)
        self.themepath = os.path.join(themesroot,
                                      self.siteconf["site"]["theme"])
        os.chdir(self.sitepath)

        # Assign routes (done before the site code to allow overrides)
        # This is functionally equivalent of what the language does, but makes sure Bottle will call the right instance.
        self.getrandstaticredirect = self.wsgiapp.route(
            "/rand/<filepath:path>")(self.getrandstaticredirect)
        self.getstatic = self.wsgiapp.route("/static/<filepath:path>")(
            self.getstatic)
        self.compilethemesass = self.wsgiapp.route("/theme/sass/master.scss")(
            self.compilethemesass)
        self.getthemeasset = self.wsgiapp.route(
            "/theme/static/<filepath:path>")(self.getthemeasset)
        # Raw string: the route's regex needs a literal "\.", which in a
        # plain string is an invalid escape (SyntaxWarning on 3.12+).
        self.compilesass = self.wsgiapp.route(r"/sass/<filename:re:.*\.scss>")(
            self.compilesass)
        self.catchall = self.wsgiapp.route("/")(
            self.wsgiapp.route("/<filepath:path>")(view(
                os.path.join(self.themepath, "master.tpl"))(self.catchall)))
        self.dologin = self.wsgiapp.route("/login",
                                          method="POST")(self.dologin)

        # If they have code, run it
        if "additional_code" in self.siteconf["site"]:
            # Temporarily point sys.path[0] at the site so its code can
            # import site-local modules; restored right after.
            oldpath = sys.path
            sys.path[0] = self.sitepath
            importlib.invalidate_caches()
            with open(os.path.join(self.sitepath,
                                   self.siteconf["site"]["additional_code"]),
                      mode="rt") as codefile:
                sandbox.create_box(
                    codefile.read(), self.wsgiapp, apiclass=self.apiclass
                )  # This file is excempt from the linking clauses in the license, allowing it to be non-(A)GPL.
            sys.path = oldpath
            importlib.invalidate_caches()

        # Configure Nginx
        socketpath = "/tmp/csg2_{}.sock".format(
            self.siteconf["site"]["domain_name"].replace(".", "_"))
        print("-> Generating config.")
        with open(os.path.abspath(siteconftemplate),
                  mode="rt",
                  encoding="utf-8") as sitetemplate:
            sitetemplatetxt = sitetemplate.read()
            newsite = sitetemplatetxt.replace(
                "%%SERVERNAME%%",
                self.siteconf["site"]["domain_name"]).replace(
                    "%%SOCKETPATH%%", socketpath)
            with open("/tmp/{}.csg2nginx".format(
                    self.siteconf["site"]["domain_name"].replace(".", "_")),
                      mode="wt",
                      encoding="utf-8") as newconf:
                newconf.write(newsite)

        # Serve site (blocks until shutdown).
        print("-> Serving up site on '{}'.".format(socketpath))
        waitress_serve(self.wsgiapp, unix_socket=socketpath)
Example #59
0
    async def load_module(
        self, doc, message, name=None, origin="<string>", did_requirements=False
    ):
        """Compile and register an external module from its source text *doc*.

        Honors ``# scope:`` directives embedded in the source, registers the
        module under a unique name, and — on ImportError — tries one pip
        installation of the requirements declared in the source before
        retrying. Replies to *message* (when given) with progress and the
        final help summary.

        :param doc: the module's full source code as a string.
        :param message: chat message to answer, or None for silent loading.
        :param name: stable module name; a random uid is used when None.
        :param origin: origin string recorded in the module spec.
        :param did_requirements: internal flag — True on the retry after a
            pip install, to prevent an install/retry loop.
        :returns: True on success (also when only a restart is still needed),
            False on failure, None when loading was refused.
        """
        # "# scope:" directives let a module veto loading in unsupported
        # environments.
        if re.search(r"# ?scope: ?non_heroku", doc) and 'DYNO' in os.environ:
            if isinstance(message, Message):
                await utils.answer(message, self.strings("non_heroku"))
            return

        # os.system returns non-zero when ffmpeg is not on PATH.
        if re.search(r"# ?scope: ?ffmpeg", doc) and os.system('ffmpeg -version'):  # skipcq: BAN-B605, BAN-B607
            if isinstance(message, Message):
                await utils.answer(message, self.strings("ffmpeg_required"))
            return

        if re.search(r"# ?scope: ?inline", doc) and not self.inline.init_complete:
            if isinstance(message, Message):
                await utils.answer(message, self.strings("inline_init_failed"))
            return

        # Minimum version check, e.g. "# scope: geektg_min 1.2.3".
        if re.search(r"# ?scope: ?geektg_min", doc):
            ver = re.search(r"# ?scope: ?geektg_min ([0-9]+\.[0-9]+\.[0-9]+)", doc).group(1)
            ver_ = tuple(map(int, ver.split('.')))
            if main.__version__ < ver_:
                await utils.answer(message, self.strings('version_incompatible').format(ver))
                return

        developer = re.search(r"# ?meta developer: ?(.+)", doc)
        developer = developer.group(1) if developer else False
        developer = self.strings('developer').format(developer) if developer else ""

        # Derive a module id; escape "%" and "." which are meaningful in the
        # dotted module-name namespace below.
        if name is None:
            uid = "__extmod_" + str(uuid.uuid4())
        else:
            uid = name.replace("%", "%%").replace(".", "%d")

        module_name = "friendly-telegram.modules." + uid

        try:
            try:
                spec = ModuleSpec(module_name, StringLoader(doc, origin), origin=origin)
                instance = self.allmodules.register_module(spec, module_name, origin)
            except ImportError:
                logger.info(
                    "Module loading failed, attemping dependency installation",
                    exc_info=True,
                )
                # Let's try to reinstall dependencies
                requirements = list(
                    filter(
                        lambda x: x and x[0] not in ("-", "_", "."),
                        map(str.strip, VALID_PIP_PACKAGES.search(doc)[1].split(" ")),
                    )
                )

                logger.debug("Installing requirements: %r", requirements)

                if not requirements:
                    raise  # we don't know what to install

                # Second failure after an install attempt: give up for now,
                # but keep the module saved so it loads after a restart.
                if did_requirements:
                    if message is not None:
                        await utils.answer(
                            message, self.strings("requirements_restart", message)
                        )

                    return True  # save to database despite failure, so it will work after restart

                if message is not None:
                    await utils.answer(
                        message, self.strings("requirements_installing", message)
                    )

                pip = await asyncio.create_subprocess_exec(
                    sys.executable,
                    "-m",
                    "pip",
                    "install",
                    "--upgrade",
                    "-q",
                    "--disable-pip-version-check",
                    "--no-warn-script-location",
                    *["--user"] if USER_INSTALL else [],
                    *requirements,
                )

                rc = await pip.wait()

                if rc != 0:
                    if message is not None:
                        await utils.answer(
                            message, self.strings("requirements_failed", message)
                        )

                    return False

                # Newly pip-installed packages need a finder-cache refresh
                # before they become importable.
                importlib.invalidate_caches()

                return await self.load_module(
                    doc, message, name, origin, True
                )  # Try again
            except loader.LoadError as e:
                if message:
                    await utils.answer(message, f"🚫 <b>{utils.escape_html(str(e))}</b>")
                return
        except BaseException as e:  # That's okay because it might try to exit or something, who knows.
            logger.exception(f"Loading external module failed due to {e}")

            if message is not None:
                await utils.answer(message, self.strings("load_failed", message))

            return False

        instance.inline = self.inline
        if hasattr(instance, '__version__') and isinstance(instance.__version__, tuple):
            version = "<b><i> (v" + ".".join(list(map(str, list(instance.__version__)))) + ")</i></b>"
        else:
            version = ""

        # Hand the module its config and signal readiness; LoadError here is
        # a refusal, any other exception is a module bug.
        try:
            try:
                self.allmodules.send_config_one(instance, self._db, self.babel)
                await self.allmodules.send_ready_one(
                    instance, self._client, self._db, self.allclients
                )
            except loader.LoadError as e:
                if message:
                    await utils.answer(message, f"🚫 <b>{utils.escape_html(str(e))}</b>")
                return
        except Exception as e:
            logger.exception(f"Module threw because {e}")

            if message is not None:
                await utils.answer(message, self.strings("load_failed", message))

            return False

        # Build and send the "loaded" summary (name, version, command docs).
        if message is not None:
            try:
                modname = instance.strings("name", message)
            except KeyError:
                modname = getattr(instance, "name", "ERROR")

            modhelp = ""
            prefix = utils.escape_html(
                (self._db.get(main.__name__, "command_prefix", False) or ".")
            )

            if instance.__doc__:
                modhelp += (
                    f"<i>\nℹ️ {utils.escape_html(inspect.getdoc(instance))}</i>\n"
                )

            # Modules can opt out of the detailed per-command docs.
            if re.search(r"# ?scope: ?disable_onload_docs", doc):
                return await utils.answer(
                    message,
                    self.strings("loaded", message).format(modname.strip(), version, modhelp) + developer,
                )

            for _name, fun in instance.commands.items():
                modhelp += self.strings("single_cmd", message).format(prefix, _name)

                if fun.__doc__:
                    modhelp += utils.escape_html(inspect.getdoc(fun))
                else:
                    modhelp += self.strings("undoc_cmd", message)

            if self.inline.init_complete:
                if hasattr(instance, "inline_handlers"):
                    for _name, fun in instance.inline_handlers.items():
                        modhelp += self.strings("ihandler", message).format(
                            f"@{self.inline._bot_username} {_name}"
                        )

                        if fun.__doc__:
                            # Drop decorator-style "@..." lines from the doc.
                            modhelp += utils.escape_html(
                                "\n".join(
                                    [
                                        line.strip()
                                        for line in inspect.getdoc(fun).splitlines()
                                        if not line.strip().startswith("@")
                                    ]
                                )
                            )
                        else:
                            modhelp += self.strings("undoc_ihandler", message)

                if hasattr(instance, "callback_handlers"):
                    for _name, fun in instance.callback_handlers.items():
                        modhelp += self.strings("chandler", message).format(_name)

                        if fun.__doc__:
                            modhelp += utils.escape_html(
                                "\n".join(
                                    [
                                        line.strip()
                                        for line in inspect.getdoc(fun).splitlines()
                                        if not line.strip().startswith("@")
                                    ]
                                )
                            )
                        else:
                            modhelp += self.strings("undoc_chandler", message)

            try:
                await utils.answer(
                    message,
                    self.strings("loaded", message).format(modname.strip(), version, modhelp) + developer,
                )
            except telethon.errors.rpcerrorlist.MediaCaptionTooLongError:
                # Caption exceeded Telegram's media-caption limit; fall back
                # to a plain reply.
                await message.reply(
                    self.strings("loaded", message).format(modname.strip(), version, modhelp) + developer
                )

        return True
    def do_GET(self):
        """Serve documentation HTML for the requested path.

        Routes: "/" renders an index of all configured modules; "*.ext"
        resolves external identifiers (generating HTML on the fly when
        needed); remaining paths are normalized via redirects or rendered
        with ``self.html()``. The elif order below is significant.
        """
        # Deny favicon shortcut early.
        if self.path == "/favicon.ico":
            return None

        # Source files may have changed on disk since the last request;
        # make sure fresh imports see them.
        importlib.invalidate_caches()
        code = 200
        if self.path == "/":
            # Index page: re-import every configured module and list
            # (name, first docstring) pairs sorted by name.
            modules = [
                pdoc.import_module(module, reload=True)
                for module in self.args.modules
            ]
            modules = sorted((module.__name__, inspect.getdoc(module))
                             for module in modules)
            out = pdoc._render_template('/html.mako',
                                        modules=modules,
                                        **self.template_config)
        elif self.path.endswith(".ext"):
            # External links are a bit weird. You should view them as a giant
            # hack. Basically, the idea is to "guess" where something lives
            # when documenting another module and hope that guess can actually
            # track something down in a more global context.
            #
            # The idea here is to start specific by looking for HTML that
            # exists that matches the full external path given. Then trim off
            # one component at the end and try again.
            #
            # If no HTML is found, then we ask `pdoc` to do its thang on the
            # parent module in the external path. If all goes well, that
            # module will then be able to find the external identifier.

            import_path = self.path[:-4].lstrip("/")
            resolved = self.resolve_ext(import_path)
            if resolved is None:  # Try to generate the HTML...
                print("Generating HTML for %s on the fly..." % import_path,
                      file=sys.stderr)
                try:
                    out = pdoc.html(
                        import_path.split(".")[0], **self.template_config)
                except Exception as e:
                    print('Error generating docs: {}'.format(e),
                          file=sys.stderr)
                    # All hope is lost.
                    code = 404
                    out = "External identifier <code>%s</code> not found." % import_path
            else:
                return self.redirect(resolved)
        # Redirect '/pdoc' to '/pdoc/' so that relative links work
        # (results in '/pdoc/cli.html' instead of 'cli.html')
        elif not self.path.endswith(('/', '.html')):
            return self.redirect(self.path + '/')
        # Redirect '/pdoc/index.html' to '/pdoc/' so it's more pretty
        elif self.path.endswith(pdoc._URL_PACKAGE_SUFFIX):
            return self.redirect(self.path[:-len(pdoc._URL_PACKAGE_SUFFIX)] +
                                 '/')
        else:
            # Normal case: render the module addressed by the URL; on any
            # import/render failure serve a 404 page with the traceback.
            try:
                out = self.html()
            except Exception:
                import traceback
                from html import escape
                code = 404
                out = "Error importing module <code>{}</code>:\n\n<pre>{}</pre>".format(
                    self.import_path_from_req_url,
                    escape(traceback.format_exc()))
                out = out.replace('\n', '<br>')

        # Emit status line, headers, and the rendered body.
        self.send_response(code)
        self.send_header("Content-type", "text/html; charset=utf-8")
        self.end_headers()
        self.echo(out)