Ejemplo n.º 1
0
 def __init__(self, conf, directory, destination=None, intermediate=None,
                                                       suffix="cpp",
                                                       prefix="yodogg",
                                                       use_cdb=True,
                                                       do_shared=True, do_static=True,
                                                       do_preload=True,
                                                     **kwargs):
     """ Initialize a Halide generator compile/load/run suite.
         
         Parameters:
             conf         -- a config-ish instance (required; must be truthy)
             directory    -- source directory to scan for generator sources (must exist)
             destination  -- output directory for linked artifacts (created if missing)
             intermediate -- directory for intermediate artifacts (created if missing)
             suffix       -- source-file suffix to match (falsy values reset to "cpp")
             prefix       -- basename prefix for the output library and archive
             use_cdb      -- whether to maintain a compilation database
             do_shared    -- whether to link a shared (dynamic) library
             do_static    -- whether to create a static archive
             do_preload   -- whether to preload the shared library (requires do_shared)
         
         Options consumed from **kwargs:
             maximum -- cap on the number of generator sources to use
             verbose -- enable progress printing
             cdb     -- a caller-furnished compilation database instance
         
         Raises:
             CompilerError -- when `conf` is falsy or `directory` doesn't exist
     """
     if not conf:
         raise CompilerError("A config-ish instance is required")
     # Falsy suffix/prefix values fall back to the documented defaults:
     if not suffix:
         suffix = "cpp"
     if not prefix:
         prefix = "yodogg"
     self.MAXIMUM = int(kwargs.pop('maximum', DEFAULT_MAXIMUM_GENERATOR_COUNT))
     self.VERBOSE = bool(kwargs.pop('verbose', DEFAULT_VERBOSITY))
     self.conf = conf
     self.prefix = u8str(prefix)
     self.suffix = u8str(suffix).lower()
     self.do_shared = bool(do_shared)
     self.do_static = bool(do_static)
     # Preloading only makes sense when a shared library will be linked:
     self.do_preload = self.do_shared and bool(do_preload)
     self.use_cdb = bool(use_cdb)
     self.directory = Directory(pth=directory)
     if not self.directory.exists:
         raise CompilerError(f"Non-existent generator source directory: {self.directory}")
     self.destination = Directory(pth=destination)
     if not self.destination.exists:
         self.destination.makedirs()
     self.library = self.destination.subpath(f"{self.prefix}{SHARED_LIBRARY_SUFFIX}")
     self.archive = self.destination.subpath(f"{self.prefix}{STATIC_LIBRARY_SUFFIX}")
     self.intermediate = Intermediate(pth=intermediate)
     if not self.intermediate.exists:
         self.intermediate.makedirs()
     # Honor a caller-furnished compilation database, else create one in the
     # intermediate directory; None altogether when `use_cdb` is off (the
     # and/or idiom is kept as-is to preserve exact truthiness behavior):
     cdb = kwargs.pop('cdb', None)
     self.cdb = self.use_cdb and (cdb or CDBJsonFile(directory=self.intermediate)) or None
     # Phase flags -- flipped to True as each pipeline stage completes:
     self._precompiled = False
     self._compiled = False
     self._postcompiled = False
     self._linked = False
     self._archived = False
     self._preloaded = False
     self.sources = OCDList()
     self.prelink = OCDList()
     self.link_result = tuple()
     self.archive_result = tuple()
     self.preload_result = None
     if self.VERBOSE:
         print("")
         print("Initialized Halide generator compile/load/run suite:")
         print(f"* Config class: {self.conf.name}")
         print(f"* Using source: {self.directory}")
         print(f"* With targets: {self.destination}")
         if do_shared:
             print(f"*      Library: {self.library}")
         if do_static:
             print(f"*      Archive: {self.archive}")
         if use_cdb:
             print(f"*   Compile DB: {repr(self.cdb)}")
         print(f"* Intermediate: {self.intermediate}")
         print("")
Ejemplo n.º 2
0
 def __init__(self, directory=None):
     """ Set up paths for the compilation-database JSON file, falling back
         to the current working directory when no directory is given. """
     super(CDBJsonFile, self).__init__()
     where = directory or os.getcwd()
     self.directory = Directory(pth=where)
     self.target = self.directory.subpath(self.filename)
     self.read_from = None
     self.written_to = None
Ejemplo n.º 3
0
def test(MAXIMUM_GENERATORS=255):
    
    """ Run the inline tests for the halogen.compile module.
        
        Exercises the full Generators context-manager lifecycle against the
        generator sources in a fixed test directory, then copies, zips, lists,
        and finally removes the resulting compilation artifacts.
        
        NOTE(review): the source path below is hard-coded to one developer's
        home directory -- this test will only find sources on that machine.
    """
    
    import tempfile
    from contextlib import ExitStack
    from pprint import pprint
    
    # Support running both as an installed package and as a loose script:
    if __package__ is None or __package__ == '':
        import api # type: ignore
        from utils import terminal_width
    else:
        from . import api
        from .utils import terminal_width
    
    directory = Directory(pth="/Users/fish/Dropbox/halogen/tests/generators")
    destination = Directory(pth=os.path.join(tempfile.gettempdir(), "yodogg"))
    zip_destination = os.path.realpath("/tmp")
    
    with TemporaryDirectory(prefix='yo-dogg-') as td:
        
        if not td.exists:
            print("X> TemporaryDirectory DOES NOT EXIST:")
            print(f"X> {td}")
        
        # We use a contextlib.ExitStack instance to separate out the construction
        # of the halogen.compile.Generators instance (q.v. immediately below) and
        # the call to __enter__ (q.v. right after that) so as to trap any and all
        # exceptions that may be thrown individually in either the constructor call
        # -- e.g. Generators.__init__ -- or Generators.__enter__ …
        
        stack = ExitStack()
        
        gens = Generators(CONF, directory=directory,
                                destination=td,
                                intermediate=td.subdirectory(".intermediate"),
                                maximum=MAXIMUM_GENERATORS,
                                verbose=DEFAULT_VERBOSITY,
                                use_cdb=True)
        
        # Preserve compilation artifacts:
        # td.do_not_destroy()
        
        # Each except-clause below prints the error and falls through; the
        # commented-out lines sketch which pipeline stages could be retried:
        try:
            # Calls Generators.__enter__(self=gens):
            stack.enter_context(gens)
        except CompilerError as exc:
            print_exception(exc)
            # gens.precompile() and gens.compile()
        except CompileDatabaseError as exc:
            print_exception(exc)
            # gens.precompile() and gens.compile()
        except LinkerError as exc:
            print_exception(exc)
            # if gens.compiled and gens.do_static:
            #     gens.arch()
        except ArchiverError as exc:
            print_exception(exc)
            # if gens.compiled and gens.do_shared:
            #     gens.link()
            # if gens.linked and gens.do_preload:
            #     gens.preload_all()
        except GeneratorLoaderError as exc:
            print_exception(exc)
        except GenerationError as exc:
            print_exception(exc)
        else:
            with stack: # Exiting this scope calls Generators.__exit__(self=gens):
                
                # Report which pipeline phases completed:
                precompiled = gens.precompiled and "YES" or "no"
                compiled = gens.compiled and "YES" or "no"
                postcompiled = gens.postcompiled and "YES" or "no"
                linked = gens.linked and "YES" or "no"
                archived = gens.archived and "YES" or "no"
                preloaded = gens.preloaded and "YES" or "no"
                
                print("")
                print(f"IS IT PRECOMPILED? -- {precompiled}")
                print(f"IS IT COMPILED? -- {compiled}")
                print(f"IS IT POSTCOMPILED? -- {postcompiled}")
                print(f"IS IT LINKED? -- {linked}")
                print(f"IS IT ARCHIVED? -- {archived}")
                print(f"IS IT PRELOADED? -- {preloaded}")
                print("")
                
                print(f"LIBRARY: {gens.library}")
                if gens.linked and os.path.exists(gens.library):
                    print("LIBRARY FILE EXISTS")
                
                print(f"ARCHIVE: {gens.archive}")
                if gens.archived and os.path.exists(gens.archive):
                    print("ARCHIVE FILE EXISTS")
                
                print(f"REGISTERED GENERATORS: {api.registered_generators()}")
                
                # loaded_generators = gens.loaded_generators()
                
                if DEFAULT_VERBOSITY:
                    if gens.loaded_count > 0:
                        print(f"... SUCCESSFULLY LOADED GENERATORS FROM LIBRARY {gens.library}")
                        print(f"... THERE ARE {gens.loaded_count} GENERATORS LOADED FROM THAT LIBRARY, DOGG")
                    else:
                        print(f"... NO GENERATORS COULD BE LOADED FROM LIBRARY {gens.library}")
                
                # Run generators:
                generated = gens.run(emit='expanded')
                
                print('')
                pprint(generated, indent=4,
                                  width=terminal_width)
                print('')
                
                # Copy the library and archive files to $TMP/yodogg:
                if destination.exists:
                    if DEFAULT_VERBOSITY:
                        print(f"Removing destination: {destination} …")
                    rm_rf(destination)
                
                if DEFAULT_VERBOSITY:
                    print(f"Copying from {td} to {destination} …")
                td.copy_all(destination)
                
                with TemporaryName(suffix="zip", parent=zip_destination) as tz:
                    if DEFAULT_VERBOSITY:
                        print(f"Zip-archiving destination contents to zipfile: {tz} …")
                    destination.zip_archive(tz)
                
                if gens.intermediate.exists:
                    if CDBJsonFile.in_directory(gens.intermediate):
                        if DEFAULT_VERBOSITY:
                            print("")
                            print(f"Found compilation DB file “{CDBJsonFile.filename}” in intermediate: {gens.intermediate}:")
                            with CDBJsonFile(directory=gens.intermediate) as cdb:
                                pprint(cdb.entries, indent=4,
                                                    width=terminal_width)
                    if DEFAULT_VERBOSITY:
                        print("")
                        print(f"Listing files at intermediate: {gens.intermediate} …")
                    intermediate_list = OCDList(gens.intermediate.subpath(listentry) \
                                                for listentry in gens.intermediate.ls())
                    pprint(listify(*intermediate_list), indent=4,
                                                        width=terminal_width)
                else:
                    print("X> Intermediate directory DOES NOT EXIST")
                    print(f"X> {gens.intermediate}")
                
                if destination.exists:
                    if DEFAULT_VERBOSITY:
                        print("")
                        print(f"Listing files at destination: {destination} …")
                    destination_list = OCDList(destination.subpath(listentry) \
                                               for listentry in destination.ls())
                    pprint(listify(*destination_list), indent=4,
                                                       width=terminal_width)
                    if DEFAULT_VERBOSITY:
                        print(f"Removing destination: {destination} …")
                    rm_rf(destination)
                else:
                    print("X> Destination directory DOES NOT EXIST")
                    print(f"X> {destination}")
Ejemplo n.º 4
0
class Generators(contextlib.AbstractContextManager):
    
    """ Atomically compile all C++ source files from a given directory tree as generators,
        using a config instance (q.v. Generator, above) and then link all of them as a dynamic
        shared-object library. As a context manager, all of the intermediate Generator instances
        created during compilation (because that is how it works dogg, like by using a Generator
        for each discovered source file, OK) use a TemporaryName as their output targets -- so
        it's like POOF, no fuss no muss, basically
    """
    
    # Named emit-option presets, selectable by key when calling run(emit=…):
    emits = {
         'default' : OCDFrozenSet(default_emits),
         'minimal' : OCDFrozenSet(default_emits),
        'expanded' : OCDFrozenSet(('static_library',
                                   'stmt_html',
                                   'h', 'o',
                                   'cpp',
                                   'python_extension')),
            'all' : valid_emits
    }
    
    def __init__(self, conf, directory, destination=None, intermediate=None,
                                                          suffix="cpp",
                                                          prefix="yodogg",
                                                          use_cdb=True,
                                                          do_shared=True, do_static=True,
                                                          do_preload=True,
                                                        **kwargs):
        """ Initialize a Generators suite: validate the config and source directory,
            set up destination and intermediate directories (creating either if
            missing), derive the library/archive output paths, optionally set up a
            compilation database, and zero out all phase flags and result holders.
            
            Options consumed from **kwargs: “maximum” (generator-count cap),
            “verbose”, and “cdb” (a caller-furnished compilation database).
            
            Raises CompilerError when `conf` is falsy or `directory` doesn't exist.
        """
        if not conf:
            raise CompilerError("A config-ish instance is required")
        # Falsy suffix/prefix values fall back to the defaults:
        if not suffix:
            suffix = "cpp"
        if not prefix:
            prefix = "yodogg"
        self.MAXIMUM =  int(kwargs.pop('maximum', DEFAULT_MAXIMUM_GENERATOR_COUNT))
        self.VERBOSE = bool(kwargs.pop('verbose', DEFAULT_VERBOSITY))
        self.conf = conf
        self.prefix = u8str(prefix)
        self.suffix = u8str(suffix).lower()
        self.do_shared = bool(do_shared)
        self.do_static = bool(do_static)
        # Preloading requires a shared library to load from:
        self.do_preload = bool(do_shared) and bool(do_preload)
        self.use_cdb = bool(use_cdb)
        self.directory = Directory(pth=directory)
        if not self.directory.exists:
            # NOTE(review): “Non-existant” is misspelled in this user-facing message
            raise CompilerError(f"Non-existant generator source directory: {self.directory}")
        self.destination = Directory(pth=destination)
        if not self.destination.exists:
            self.destination.makedirs()
        self.library = self.destination.subpath(f"{self.prefix}{SHARED_LIBRARY_SUFFIX}")
        self.archive = self.destination.subpath(f"{self.prefix}{STATIC_LIBRARY_SUFFIX}")
        self.intermediate = Intermediate(pth=intermediate)
        if not self.intermediate.exists:
            self.intermediate.makedirs()
        # Use a caller-furnished cdb instance if given, else create one in the
        # intermediate directory; None altogether when `use_cdb` is off:
        cdb = kwargs.pop('cdb', None)
        self.cdb = self.use_cdb and (cdb or CDBJsonFile(directory=self.intermediate)) or None
        # Phase flags -- flipped to True as each pipeline stage completes:
        self._precompiled = False
        self._compiled = False
        self._postcompiled = False
        self._linked = False
        self._archived = False
        self._preloaded = False
        self.sources = OCDList()
        self.prelink = OCDList()
        self.link_result = tuple()
        self.archive_result = tuple()
        self.preload_result = None
        if self.VERBOSE:
            print("")
            print("Initialized Halide generator compile/load/run suite:")
            print(f"* Config class: {self.conf.name}")
            print(f"* Using source: {self.directory}")
            print(f"* With targets: {self.destination}")
            if do_shared:
                print(f"*      Library: {self.library}")
            if do_static:
                print(f"*      Archive: {self.archive}")
            if use_cdb:
                print(f"*   Compile DB: {repr(self.cdb)}")
            print(f"* Intermediate: {self.intermediate}")
            print("")
    
    @property
    def precompiled(self):
        """ Have all generator sources been gathered? """
        return self._precompiled
    
    @property
    def compiled(self):
        """ Have all generators successfully been compiled? """
        return self._compiled
    
    @property
    def postcompiled(self):
        """ Has the compilation database (if any) been written? """
        return self._postcompiled
    
    @property
    def linked(self):
        """ Have all generators successfully been dynamically linked? """
        return self._linked
    
    @property
    def archived(self):
        """ Have all generators successfully been statically linked (née archived)? """
        return self._archived
    
    @property
    def preloaded(self):
        """ Have all dynamically-linked generators successfully been preloaded? """
        return self._preloaded
    
    @property
    def source_count(self):
        """ Number (int) of generator sources found """
        return len(self.sources)
    
    @property
    def prelink_count(self):
        """ Number (int) of compiled but as-of-yet unlinked generators """
        return len(self.prelink)
    
    @property
    def object_suffix(self):
        """ The object-file suffix corresponding to the file suffix for this instance.
            Like e.g. if you initialized your instance like
            
            >>> generators = halogen.compile.Generators(suffix="cc")
            
            … your `generators.object_suffix` value will be “cc.o” -- as in, all of the
            pre-linked object code compilation artifacts will be named “something.cc.o”
            or whatever.
        """
        return f"{self.suffix}{os.extsep}o"
    
    @property
    def compilation_database(self):
        """ Filesystem path (name) of the compilation database file, or None
            when this instance was configured without one. """
        if self.use_cdb:
            return self.cdb.name
        return None
    
    def precompile(self):
        """ Walk the path of the specified source directory, gathering all C++ generator
            source files that match the suffix furnished in the constructor, and storing
            the full filesystem paths of these files in the `self.sources` list of strings.
            
            This function returns a boolean indicating success or failure; gathering one or
            more source files is considered success, and finding no matches is a failure.
        """
        if self.precompiled:
            return True
        if self.VERBOSE:
            print(f"Scanning {self.directory} for “{self.suffix}” files")
        for path, dirs, files in self.directory.walk(followlinks=True):
            for df in files:
                # NOTE(review): bare endswith(suffix) -- no leading dot -- so a
                # suffix of "cpp" would also match a file named e.g. "yocpp":
                if df.lower().endswith(self.suffix):
                    self.sources.append(os.path.realpath(
                                        os.path.join(path, df)))
        if self.source_count < self.MAXIMUM:
            if self.VERBOSE:
                print(f"Using {self.source_count} found generator sources")
                print("")
            self.MAXIMUM = self.source_count
        else:
            if self.VERBOSE:
                print(f"Using {self.MAXIMUM} of {self.source_count} generator sources found")
                print("")
            # Truncate the gathered source list to the configured maximum:
            self.sources = OCDList(self.sources[:self.MAXIMUM])
        if self.source_count > 0:
            self._precompiled = True
        return self.precompiled
    
    def compile_all(self):
        """ Attempt to compile all of the generator source files we discovered while walking
            the directory with which we were initialized.
            
            Internally, we use a halogen.filesystem.TemporaryName and a halogen.compile.Generator
            instance, both within context-managed nested scopes, for atomic operations. The
            return value is boolean: True if all discovered source files were successfully compiled
            and False if not -- in many such cases, one of the many sub-operations can and will
            throw an exception (q.v. halogen.errors supra).
        """
        if self.compiled:
            return True
        if not self.precompiled:
            raise CompilerError(f"can't compile before precompilation: {self.directory}")
        if self.source_count < 1:
            raise CompilerError(f"can't find any compilation inputs: {self.directory}")
        if self.VERBOSE:
            print(f"Compiling {self.source_count} generator source files")
        for source in self.sources:
            sourcebase = os.path.basename(source)
            splitbase = os.path.splitext(sourcebase)
            with TemporaryName(prefix=splitbase[0],
                               suffix=self.object_suffix) as tn:
                with Generator(self.conf, cdb=self.cdb,
                                          source=source,
                                          destination=os.fspath(tn),
                                          intermediate=os.fspath(self.intermediate),
                                          verbose=self.VERBOSE) as gen:
                    # Only keep object files from successful compiles -- both the
                    # Generator and its TemporaryName output are marked to survive
                    # their context-manager scopes:
                    if gen.compiled:
                        gen.do_not_destroy()
                        self.prelink.append(tn.do_not_destroy())
        if self.VERBOSE:
            print("")
        # Success means every source produced a pre-link object file:
        if self.source_count == self.prelink_count:
            self._compiled = True
        return self.compiled
    
    def postcompile(self):
        """ If compilation has previously been successful, the `postcompile()` method will,
            if the `use_cdb` initialization option was True, attempt to write out a compilation
            database JSON file, using either the internal `self.cdb` compilation database
            instance, or, optionally, a compilation database of the users’ choosing, passed in
            at initialization as `cdb`.
            
            For more on the subject, q.v. http://clang.llvm.org/docs/JSONCompilationDatabase.html,
            the CompDB project at https://github.com/Sarcasm/compdb, or the source of the module
            `halogen.compiledb` supra.
        """
        if self.postcompiled:
            return True
        if not self.compiled:
            raise CompileDatabaseError(f"can't postcompile before compilation: {self.directory}")
        if self.prelink_count < 1:
            raise CompileDatabaseError(f"couldn't find any compilation outputs: {self.directory}")
        if self.VERBOSE:
            print(f"Writing {self.cdb.length} compilation database entries")
        self.cdb.write()
        if self.VERBOSE:
            print("")
        # Only flag success once the database file actually exists on disk:
        if self.compilation_database:
            if os.path.isfile(self.compilation_database):
                self._postcompiled = True
        return self.postcompiled
    
    def link(self):
        """ If compilation has previously been successful, the `link()` method will attempt
            to link all of the compiled object code artifacts into a dynamic-link library file,
            per the host platform (e.g. a DLL file on Windows*, a dylib on Mac OS X, a shared
            object binary on Linux and Solaris, etc).
            
            The `link()` method considers the discovery of an existing dynamic-link library file
            to be an error condition -- it will not, at the time of writing, overwrite a file
            at its destination path.
            
            * - that is, if this code ever runs on Windows, which I think would take some
                kind of crazy miracle, and/or someone giving me a Windows machine and a ton
                of spare time… you never know but I dunno
        """
        if self.linked:
            return True
        if not self.compiled:
            raise LinkerError(f"can't link before compilation: {self.directory}")
        if self.prelink_count < 1:
            raise LinkerError(f"no files available for linker: {self.directory}")
        if os.path.exists(self.library):
            raise LinkerError(f"can't overwrite linker output: {self.library}")
        if self.VERBOSE:
            # print("")
            print(f"Linking {self.prelink_count} generators as {os.path.basename(self.library)}")
            print("")
        self.link_result += config.LD(self.conf,
                                      self.library,
                                     *self.prelink, verbose=self.VERBOSE)
        if len(self.link_result) > 0: # apres-link
            self._linked = os.path.isfile(self.library)
        if not self.linked:
            # link_result[1] presumably holds linker stderr output -- TODO confirm:
            if len(self.link_result[1]) > 0: # failure
                raise LinkerError(self.link_result[1])
            raise LinkerError(f"Dynamic-link library file wasn’t created: {self.library}")
        return self.linked
    
    def arch(self):
        """ If compilation has previously been successful, the `arch()` method will attempt
            to link all of the compiled object code artifacts into a static-link library file,
            per the host platform (e.g. a “lib” file on Windows*, a “.a” archive file on Mac
            OS X, Linux and Solaris, etc).
            
            The `arch()` method considers the discovery of an existing static-link library file
            to be an error condition -- it will not, at the time of writing, overwrite a file
            at its destination path.
            
            * - that is, if this code ever runs on Windows, which I think would take some
                kind of crazy miracle, and/or someone giving me a Windows machine and a ton
                of spare time… you never know but I dunno
        """
        if self.archived:
            return True
        if not self.compiled:
            raise ArchiverError(f"can't archive before compilation: {self.directory}")
        if self.prelink_count < 1:
            raise ArchiverError(f"no files available for archiver: {self.directory}")
        if os.path.exists(self.archive):
            raise ArchiverError(f"can't overwrite archiver output: {self.archive}")
        if self.VERBOSE:
            # print("")
            print(f"Archiving {self.prelink_count} generators as {os.path.basename(self.archive)}")
            print("")
        self.archive_result += config.AR(self.conf,
                                         self.archive,
                                        *self.prelink, verbose=self.VERBOSE)
        if len(self.archive_result) > 0: # apres-arch
            self._archived = os.path.isfile(self.archive)
        if not self.archived:
            # archive_result[1] presumably holds archiver stderr output -- TODO confirm:
            if len(self.archive_result[1]) > 0: # failure
                raise ArchiverError(self.archive_result[1])
            raise ArchiverError(f"Static library archive file wasn’t created: {self.archive}")
        return self.archived
    
    def preload_all(self):
        """ If both compilation and dynamic-library linking have been successful -- that is to
            say, both the `compile_all()` and `link()` have been successfully called without error,
            the `preload_all()` method will attempt to dynamic-link-load the binary library file
            generated by the `link()` call into the current running process.
            
            This method returns an object representing the result of the library-load call made
            to the `ctypes` module method `ctypes.cdll.LoadLibrary(…)`. The value of the read-only
            property `generators.preloaded` will thereafter appear as `True` iff the call to
            `halogen.generate.preload(…)` was able to successfully load the library via ctypes.
            
            Exceptions of type `halogen.errors.GeneratorLoaderError` can raise if things go awry.
        """
        # preload() may also raise GeneratorLoaderError:
        if self.preloaded:
            return self.preload_result
        if self.compiled and self.linked:
            if self.VERBOSE:
                # print("")
                print(f"Preloading generators from {self.library}")
                # print("")
            try:
                self.preload_result = preload(self.library, verbose=self.VERBOSE)
            except GeneratorLoaderError as preload_error:
                raise preload_error
            else:
                self._preloaded = True
                return self.preload_result
        raise GeneratorLoaderError("can't preload from an uncompiled/unlinked generator")
    
    def loaded_generators(self):
        """ Return a tuple containing the names of all successfully loaded and currently available
            generator modules.
            
            This `loaded_generators()` method calls `halogen.api.registered_generators()`, which
            uses Cython’s C++ bridge to call `Halide::GeneratorRegistry::enumerate()` and convert
            the returned `std::vector<std::string>` into a Python set of Python strings. That is,
            if the instance of `halogen.compile.Generators` has previously successfully ran its
            compilation phase, its link-dynamic phase, and its preload phase -- if not, it’ll
            just toss back an empty set without making any calls into Halide whatsoever.
        """
        if self.preloaded:
            # Support running both as an installed package and as a loose script:
            if __package__ is None or __package__ == '':
                import api # type: ignore
            else:
                from . import api
            return OCDFrozenSet(api.registered_generators())
        return OCDFrozenSet()
    
    @property
    def loaded_count(self):
        """ Number (int) of dynamic-link-loaded generator modules currently available """
        return len(self.loaded_generators())
    
    def run(self, target=None, emit=None, substitutions=None):
        """ Use the halogen.compile.Generators.run(…) method to run generators.
            
            All generator code that this instance knows about must have been previously compiled,
            dynamically linked, and preloaded. Assuming that all of these generators were properly
            programmed, they will then be available to halogen via the Halide Generator API --
            specifically the Generator Registry (q.v. `loaded_generators()` method docstring, supra).
        """
        # Check self-status:
        if not self.precompiled:
            raise GenerationError("Can’t run() before first precompiling, compiling, dynamic-linking, and preloading")
        if not self.compiled:
            raise GenerationError("Can’t run() before first compiling, dynamic-linking, and preloading")
        if not self.linked:
            raise GenerationError("Can’t run() before first dynamic-linking and preloading")
        if not self.preloaded:
            raise GenerationError("Can’t run() before first preloading")
        if self.loaded_count < 1:
            raise GenerationError("Can’t run() without one or more loaded generators")
        
        # Check args:
        if not target:
            target = 'host'
        
        if not substitutions:
            substitutions = {}
        
        # `emit` may be None (use the 'default' preset), a preset name from the
        # class-level `emits` mapping, or an iterable of emit options:
        emits = type(self).emits
        if not emit:
            emit = tuplize(*emits['default'])
        elif is_string(emit):
            emit = u8str(emit)
            if emit in emits:
                emit = tuplize(*emits.get(emit))
            else:
                possibles = ", ".join(OCDList(emits.keys()))
                raise GenerationError("String value for “emit” when calling Generators::run(…) "
                                     f"must be one of: {possibles}")
        else:
            emit = tuplize(*emit)
        
        if len(emit) < 1:
            possibles = ", ".join(emits['all'])
            raise GenerationError("Iterable value for “emit” when calling Generators::run(…) must contain "
                                 f"one or more valid emit options (one of: {possibles})")
        
        # Run generators, storing output files in $TMP/yodogg
        artifacts = generate(*self.loaded_generators(), verbose=self.VERBOSE,
                                                        target=target,
                                                        emit=emit,
                                                        output_directory=self.destination,
                                                        substitutions=substitutions)
        
        # Re-dictify -- each artifact is a (base_path, outputs, module) triple,
        # keyed here by the module's name:
        generated = { artifact[2].name : dict(base_path=artifact[0],
                                              outputs=artifact[1],
                                              module=artifact[2]) for artifact in artifacts }
        
        # TELL ME ABOUT IT.
        if self.VERBOSE:
            module_names = ", ".join(u8str(key) for key in OCDList(generated.keys()))
            print(f"run(): Accreted {len(generated)} total generation artifacts")
            print(f"run(): Module names: {module_names}")
        
        # Return redictified artifacts:
        return generated
    
    def clear(self):
        """ Delete temporary compilation artifacts: """
        # AND-accumulate so a single failed removal makes the result falsy
        # (rm_rf presumably returns a truthy success flag -- TODO confirm):
        out = True
        for of in self.prelink:
            out &= rm_rf(of)
        return out
    
    def __enter__(self):
        # 0: start as you mean to go on:
        self.precompile()
        
        # 1: COMPILE ALL THE THINGS
        if self.precompiled:
            self.compile_all()
        
        # 2: Write out compilation database:
        if self.compiled and self.use_cdb:
            self.postcompile()
        
        # 3: link dynamically
        if self.compiled and self.do_shared:
            self.link()
        
        # 4: link statically (née 'archive')
        if self.compiled and self.do_static:
            self.arch()
        
        # 5: preload dynamic-linked output:
        if self.linked and self.do_preload:
            self.preload_all()
        
        # 6: return self
        return self
    
    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
        # N.B. return False to throw, True to supress:
        self.intermediate.close()   # will destroy a TemporaryDirectory,
                                    # but not a plain Directory
        self.clear()                # will destroy all .o files
        return exc_type is None
Ejemplo n.º 5
0
 def in_directory(cls, directory):
     """ Whether this class’ canonical JSON file is present in `directory`. """
     candidate = Directory(pth=directory)
     return cls.filename in candidate
Ejemplo n.º 6
0
class CDBJsonFile(CDBBase, contextlib.AbstractContextManager):

    """ A JSON compilation database (“compilation_database.json”) rooted
        in a given directory. Usable as a context manager: entering reads
        any existing database file; exiting writes the entries back out.
    """

    # Attribute names surfaced through the CDBBase field protocol:
    fields = ('filename', 'length', 'exists')
    # Canonical on-disk filename, e.g. “compilation_database.json”:
    filename = f'compilation_database{os.extsep}json'
    # (root, extension) pair — used to name temporary files during writes:
    splitname = os.path.splitext(filename)

    @classmethod
    def in_directory(cls, directory):
        """ True if a compilation database file exists in “directory”. """
        # presumably Directory implements __contains__ over entry names:
        return cls.filename in Directory(pth=directory)

    def __init__(self, directory=None):
        """ Initialize against a directory (defaults to the CWD). """
        super().__init__()
        if not directory:
            directory = os.getcwd()
        self.directory = Directory(pth=directory)
        self.target = self.directory.subpath(self.filename)
        self.read_from = None    # path most recently read, or None
        self.written_to = None   # path most recently written, or None

    @property
    def name(self):
        """ Full path to the target database file. """
        return self.target

    @property
    def exists(self):
        """ True if the target database file exists on disk. """
        return os.path.isfile(self.name)

    def read(self, pth=None):
        """ Load entries from a JSON compilation database file.
            Reads from “pth” when given, otherwise from the default target.
            Raises CDBError if no usable path exists, if the file is missing,
            or if the JSON fails to parse. Returns self (fluent).
        """
        readpth = pth or self.target
        if not readpth:
            raise CDBError("no path value from which to read")
        readpth = os.fspath(readpth)
        if not os.path.exists(readpth):
            raise CDBError("no file from which to read")
        with open(readpth, mode="r") as handle:
            try:
                cdblist = json.load(handle)
            except json.JSONDecodeError as json_error:
                # Chain the decode error so tracebacks keep the root cause:
                raise CDBError(str(json_error)) from json_error
            else:
                # Index each entry by its 'file' value (the source path):
                for cdbentry in cdblist:
                    key = cdbentry.get('file')
                    self.entries[key] = dict(cdbentry)
        self.read_from = readpth
        return self

    def write(self, pth=None):
        """ Serialize entries to disk via a temporary file, replacing any
            existing file. Writes to “pth” when given, otherwise to the
            default target. Raises CDBError when asked to overwrite a
            directory. Returns self (fluent).
        """
        with TemporaryName(prefix=self.splitname[0],
                           suffix=self.splitname[1][1:]) as tn:
            # Render the database (str(self)) into the temporary file first:
            with open(tn.name, mode='w') as handle:
                handle.write(str(self))
            if pth is None:
                if self.exists:
                    rm_rf(self.name)
                tn.copy(self.name)
                self.written_to = self.name
            else:
                writepth = os.fspath(pth)
                if os.path.isdir(writepth):
                    raise CDBError("can't overwrite a directory")
                if os.path.isfile(writepth) or \
                   os.path.islink(writepth):
                    rm_rf(writepth)
                tn.copy(writepth)
                self.written_to = writepth
        return self

    def __enter__(self):
        # Slurp any existing database before use:
        if os.path.isfile(self.target):
            self.read()
        return self

    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
        # Persist entries on exit; the implicit None return means any
        # in-flight exception propagates:
        self.write()
# Example no. 7
def generate(*generators, **arguments):
    """ Invoke halogen.api.Module.compile(…) with the proper arguments. This function
        was conceived with replacing GenGen.cpp’s options in mind.
        
        Positional arguments name the generators to run (at least one is required).
        
        Keyword arguments:
            generator_names  — set of known generator names (defaults to
                               whatever api.registered_generators() reports)
            output_directory — destination for emitted artifacts (created if absent)
            target           — target string for api.Target (default 'host')
            emit             — iterable of emit kinds (default: default_emits)
            substitutions    — dict passed through to api.EmitOptions
            verbose          — print progress info (default DEFAULT_VERBOSITY)
        
        Returns a list of (base_path, outputs, module) triples, one per generator.
        Raises GenerationError for missing, unknown, or invalid arguments.
    """
    import os
    # Dual-mode imports: support running both as a package submodule
    # and as a standalone script:
    if __package__ is None or __package__ == '':
        import api # type: ignore
        from config import DEFAULT_VERBOSITY
        from errors import GenerationError
        from filesystem import Directory
        from utils import terminal_width, u8bytes, u8str
    else:
        from . import api # type: ignore
        from .config import DEFAULT_VERBOSITY
        from .errors import GenerationError
        from .filesystem import Directory
        from .utils import terminal_width, u8bytes, u8str
    
    # ARGUMENT PROCESSING:
    
    generators = { u8str(generator) for generator in generators }
    generator_names = OCDFrozenSet(arguments.pop('generator_names', api.registered_generators()))
    output_directory = Directory(pth=arguments.pop('output_directory', None))
    target = api.Target(target_string=u8bytes(arguments.pop('target', 'host')))
    emits = OCDFrozenSet(arguments.pop('emit', default_emits))
    substitutions = dict(arguments.pop('substitutions', {}))
    verbose = bool(arguments.pop('verbose', DEFAULT_VERBOSITY))
    
    # ARGUMENT POST-PROCESS BOUNDS-CHECKS:
    
    if len(generators) == 0:
        raise GenerationError(">=1 generator is required")
    
    if len(generator_names) == 0:
        raise GenerationError(">=1 generator name is required")
    
    if not generators.issubset(generator_names):
        # BUGFIX: the original message had the two sets inverted — report
        # the *requested* generators absent from the known-name set:
        unknown = generators.difference(generator_names)
        raise GenerationError(f"unknown generator name(s) {str(unknown)} — known names: {str(generator_names)}")
    
    if not output_directory.exists:
        output_directory.makedirs()
    
    if not emits.issubset(valid_emits):
        raise GenerationError(f"invalid emit in {str(emits)}")
    
    if verbose:
        print("")
        print(f"generate(): Preparing {len(generators)} generator modules to emit data …")
        print("")
    
    # Set what emits to, er, emit, as per the “emit” keyword argument;
    # These have been rolled into the “emits” set (q.v. argument processing supra.);
    # …plus, we’ve already ensured that the set is valid:
    emit_dict = dict(emit_defaults)
    for emit in emits:
        emit_dict[f"emit_{emit}"] = True
    
    # The “substitutions” keyword to the EmitOptions constructor is special;
    # It’s just a dict, passed forward during argument processing:
    emit_dict['substitutions'] = substitutions
    
    # Actually create the EmitOptions object from “emit_dict”:
    emit_options = api.EmitOptions(**emit_dict)
    
    if verbose:
        print(f"generate(): Target: {u8str(target)}")
        print("generate(): Emit Options:")
        print(u8str(emit_options))
        print("")
    
    # This list will store generator module compilation artifacts:
    artifacts = []
    
    # Hoisted loop-invariant: the horizontal-rule width for verbose output:
    width = max(terminal_width, 100)
    
    if verbose:
        print('-' * width)
    
    # The generator loop compiles each named generator:
    for generator in generators:
        
        # “base_path” (a bytestring) is computed using the `compute_base_path()` API function:
        base_path = api.compute_base_path(u8bytes(
                                        os.fspath(output_directory)),
                                          u8bytes(generator))
        
        # “output” (an instance of halogen.api.Outputs) is computed using the eponymously named
        # halogen.api.EmitOptions method `compute_outputs_for_target_and_path()` with an instance
        # of halogen.api.Target and a base path bytestring (q.v. note supra.):
        output = emit_options.compute_outputs_for_target_and_path(target, base_path)
        
        if verbose:
            print(f"BSEPTH: {u8str(base_path)}")
            print(f"OUTPUT: {u8str(output)}")
        
        # This API call prepares the generator code module:
        module = api.get_generator_module(generator,
                                          arguments={ 'target': target })
        
        if verbose:
            print(f"MODULE: {u8str(module.name)} ({u8str(module)})")
            print('=' * width)
        
        # The module-compilation call:
        module.compile(output)
        
        # Stow the post-compile base path (a string), outputs (an instance of
        # halogen.api.Outputs) and the module instance itself:
        artifacts.append((u8str(base_path), output, module))
    
    # Return the post-compile value artifacts for all generators:
    return artifacts