Example no. 1
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:

        reconstruction = ''
        directives = {}
        directives_meta = deepcopy(meta_information)
        directives_meta[
            'location'] = meta_information['location'] + '.directives'
        first_line = True
        for line in source.splitlines(keepends=True):
            if first_line:
                directives[line.strip()] = []
                vprint2(f"[Directives] found variant {line}")
                first_line = False
            match = self.matcher.match(line)
            if match is not None:
                vprint1(f"[Directives] found directive {line}")
                state = match.group(1).split(' ')
                directives[state[0]] = state[1:]
            else:
                reconstruction += line

        directives = '\n'.join(
            [' '.join([k] + v) for k, v in directives.items()]) + '\n'

        return [(reconstruction, meta_information),
                (directives, directives_meta)]
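A quick standalone check of the directive re-rendering step above, with hypothetical directive names and arguments:

directives = {'variant_a': [], 'blend': ['add', 'one'], 'cull': ['back']}
# each directive is rendered back as 'key arg1 arg2 ...', one per line
rendered = '\n'.join([' '.join([k] + v) for k, v in directives.items()]) + '\n'
print(rendered)
# variant_a
# blend add one
# cull back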
Example no. 2
def main():
    vprint1(f"[Bootstrap] Application Started: {version}")
    arguments = docopt(__doc__, version=version)

    vprint.verbosity_level = arguments['-v']

    vprint2(f"[Bootstrap] Parsed Arguments:\n{arguments}")

    pipeline = [
        AddShaMarker(),
        Includes(arguments['-I']),
        ActiveShaderDefines(),
        Variant(),
        ExtractDirectives(),
        ShaderSplitter(),
        GeometryInput(),
        VersionToDefines([eq_split(x) for x in arguments['-D']]),
        LayoutSugar(),
        LocationAuto(),
        NewlNewl2Newl(),
        Indents()
    ]

    vprint2(f"[Bootstrap] Created Pipeline:\n{pipeline}")

    vprint1(f"[Bootstrap] Making Compiler")

    compiler = RewriteCompiler(pipeline)

    for location in arguments['<file>']:
        do_compile(location, arguments['--format'], arguments['--output'], compiler)
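eq_split is not shown here; a minimal sketch, assuming -D arguments of the form NAME=VALUE (a bare NAME falling back to an empty value), so that VersionToDefines receives (name, value) pairs:

def eq_split(argument: str):
    # hypothetical helper: 'MAX_LIGHTS=4' -> ('MAX_LIGHTS', '4'); 'DEBUG' -> ('DEBUG', '')
    name, _, value = argument.partition('=')
    return (name, value)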
Example no. 3
    def rewrite_source(self, source: str, meta_information: Dict[str, str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Geometry-Input] Rewriter started!")

        ret = self.rgx_input.sub(self.rewrite_input_match, source)
        meta = deepcopy(meta_information)

        dict_str_list_append_list(meta, 'extra_defines', self.extradefines)

        return [(self.rgx_output.sub(self.pattern_output, ret), meta)]
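dict_str_list_append_list is defined elsewhere; a plausible sketch, assuming 'extra_defines' is kept as a comma-separated string the way the other rewriters handle it:

def dict_str_list_append_list(dictionary, key, values):
    # hypothetical body: append each value to the comma-separated string stored under key
    existing = [v for v in dictionary.get(key, '').split(',') if v]
    dictionary[key] = ','.join(existing + list(values)) + ','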
Example no. 4
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[LFLF-LF] Rewriter started!")
        done = False
        while not done:
            (done, source) = remove_newlines(source)

        return [(source, meta_information)]
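remove_newlines is not shown; given the [LFLF-LF] name and the fixed-point loop above, a minimal sketch that collapses double newlines one pass at a time might be:

def remove_newlines(source: str):
    # hypothetical body: replace '\n\n' with '\n' in one pass;
    # report True once nothing changed, which ends the caller's loop
    collapsed = source.replace('\n\n', '\n')
    return (collapsed == source, collapsed)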
Example no. 5
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Indents] Rewriter Started")
        lines = source.splitlines(keepends=True)
        indent_level = 0

        recreation = ""

        for line in lines:
            vprint3(f"[Indents] @level {indent_level}")
            indent_level -= line.count("}")
            recreation += ("\t" * indent_level) + line
            indent_level += line.count("{")

        return [(recreation, meta_information)]
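A quick standalone check of the brace-driven re-indentation on a hypothetical snippet: closing braces dedent before the line is emitted, opening braces indent the lines that follow.

sample = "void main() {\nfloat x = 1.0;\nif (x > 0.0) {\nx = 0.0;\n}\n}\n"
level, recreation = 0, ""
for line in sample.splitlines(keepends=True):
    level -= line.count("}")
    recreation += ("\t" * level) + line
    level += line.count("{")
print(recreation)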
Example no. 6
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Active-Shader-Defines] Rewriter started!")
        iterator = self.find_shader_keyword.finditer(source)
        for match in iterator:
            for shaderType in match.group(1).split(','):
                key = shaderType.strip()
                if key in self.lut and self.extradefines.count(
                        self.lut[key]) == 0:
                    self.extradefines += [self.lut[key]]
        meta = deepcopy(meta_information)
        defs = meta.setdefault('extra_defines', '')
        meta['extra_defines'] = ','.join(
            list(filter(None,
                        defs.split(',') + self.extradefines))) + ','
        return [(source, meta)]
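A standalone illustration of the 'extra_defines' merge above, with hypothetical define names; filter(None, ...) drops the empty entry that the trailing comma leaves behind.

extradefines = ['VERTEX_SHADER', 'FRAGMENT_SHADER']  # hypothetical, collected from shader(...)
defs = 'HAS_NORMALS,'                                # hypothetical value already in the meta dict
merged = ','.join(list(filter(None, defs.split(',') + extradefines))) + ','
print(merged)  # HAS_NORMALS,VERTEX_SHADER,FRAGMENT_SHADER,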
Example no. 7
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Defines] Rewriter Started")
        vprint2(
            f"[Defines] I have the following defines to insert:\n{self.defines}"
        )
        vprint2(
            f"[Defines] The following extra defines have been passed via the pipeline:\n{meta_information.get('extra_defines', '').split(',')}"
        )

        source = re.sub(
            r"#\s*version\s+([0-9]+)\s*", '#version \\1 \n' +
            (''.join([f"\n#define {k} {v}" for k, v in self.defines])) +
            (''.join([
                f"\n#define {k}"
                for k in meta_information.get('extra_defines', '').split(',')
                if k  # skip the empty entry a trailing comma would leave behind
            ]) if meta_information.get('extra_defines', None) is not None else
             '') + '\n\n', source)
        return [(source, meta_information)]
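A quick check of the substitution above on a hypothetical shader header, with one -D define and one pipeline-supplied define:

import re

defines = [('MAX_LIGHTS', '4')]   # hypothetical -D arguments
extra_defines = 'VERTEX_SHADER,'  # hypothetical pipeline value
src = "#version 450\nvoid main() {}\n"
src = re.sub(r"#\s*version\s+([0-9]+)\s*",
             '#version \\1 \n'
             + ''.join(f"\n#define {k} {v}" for k, v in defines)
             + ''.join(f"\n#define {k}" for k in extra_defines.split(',') if k)
             + '\n\n', src)
print(src)
# #version 450
# #define MAX_LIGHTS 4
# #define VERTEX_SHADER
#
# void main() {}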
Example no. 8
def do_compile(location, format, output_type, compiler):
    #try:
    with open(location, 'r') as file:
        vprint2(f"[Bootstrap] Loading {location}")
        output = compiler.rewrite_file(file.read(), location)
        vprint1(f"[Bootstrap] Ran Compiler, parsing results")

    #except Exception as e:
        #vprint0(e, file=sys.stderr)
        #sys.exit(1)

    if format == '1file':
        vprint2(f"[Bootstrap] Amalgamating Output")
        output = amalgamate(output, location)

    for (source, location) in output:
        if output_type == 'file':
            with open(location, 'w') as outfile:
                vprint2(f"[Boostrap] Writing {location}")
                outfile.write(source)
        else:
            if format != '1file':
                print(f"NAME:{location}")
            print(armorize(source))
Example no. 9
    def rewrite_file(self, source: str,
                     location: str) -> List[Tuple[str, str]]:
        """
        Applies all the rewrite-rules specified when this class was constructed, and returns a list of new sources +
        their filenames

        :param source: The input source of the file
        :param location: The file location of the source-file
        :return: new sources + locations
        """
        vprint1("[Compiler] Started Rewriting")
        # generate workspace
        workspaces = [(source, {'location': location})]

        # run all re-writers over the source
        for rewriter in self.rewriters:
            vprint1(f"[Compiler] Current Rewrite Module:{rewriter}")
            accumulator = []

            for (source, meta) in workspaces:
                accumulator += rewriter.rewrite_source(source, meta)

            vprint3(
                f"[Compiler] Accumulator after Rewrite from {rewriter}:\n{accumulator}"
            )

            workspaces = accumulator

        # transform the workspaces into source, location pairs
        ret = []

        for (source, meta) in workspaces:

            if 'location' not in meta:
                vprint1(
                    'Warning! Source without location encountered, skipped!')
                continue

            ret += [(source, meta['location'])]

        return ret
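rewrite_file only assumes that each rewriter exposes rewrite_source and may fan one workspace out into several; a toy rewriter satisfying that interface (hypothetical, for illustration only):

from typing import Dict, List, Tuple


class UpperCase:
    """Toy rewriter: upper-cases the source and leaves the meta dict untouched."""

    def rewrite_source(self, source: str, meta_information: Dict[str, str]
                       ) -> List[Tuple[str, Dict[str, str]]]:
        return [(source.upper(), meta_information)]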
Example no. 10
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Sugar-Layout] Rewriter started!")
        return [(self.rgx.sub(self.pattern, source), meta_information)]
Example no. 11
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        if meta_information.get('location', '').endswith('.directives'):
            return [(source, meta_information)]

        vprint1("[Splitter]  Rewriter Started!")
        vprint1(
            f"[Splitter] Using this dictionary:\n{self.keyword_sections_pairs}"
        )
        # split input source into lines
        lines = source.splitlines()

        # prepare output sources
        sources = dict.fromkeys(self.all_source_types, '')

        to_keep = self.all_source_types

        # prepare active sections (by default all sections are enabled)
        active_sections = self.all_valid_source_types
        brace_counter = 0
        need_counting = False

        itr = 0
        while itr < len(lines):
            line = lines[itr]

            # make the loop end at some point
            itr += 1

            # check for 'shader(<...>)' keyword
            matches = self.find_shader_keyword.match(line.strip())
            if matches is not None and not need_counting:
                vprint1("[Splitter] Encountered shader kw")
                brace_counter = line.count('{')
                need_counting = True
                inner = matches.group(1)

                # match keywords to extensions and make sure they are unique
                active_sections = set(
                    self.keyword_sections_pairs.get(x.strip(),
                                                    self.fault_extension)
                    for x in inner.split(','))
                vprint2(
                    f"[Splitter] Active sections are {active_sections} now")
                # makes sure that this line does not get put into the output
                continue

            # check for 'generate(<...>)' keyword
            matches = self.find_generate_keyword.match(line.strip())
            if matches is not None:
                vprint1("[Splitter] Encountered generate kw")

                inner = matches.group(1)

                # match keywords to extensions and make sure they are unique
                to_keep = set([self.fault_extension] + [
                    self.keyword_sections_pairs.get(x.strip(),
                                                    self.fault_extension)
                    for x in inner.split(',')
                ])

                vprint2(f"[Splitter] Shaders to keep is set to {to_keep} now")

                # makes sure that this line does not get put into the output
                continue

            # check if we need to count braces
            if need_counting and (line.find('{') != -1
                                  or line.find('}') != -1):

                vprint2(
                    f"[Splitter] Counting braces! brace counter is at:{brace_counter}"
                )
                # swap temp and line
                temp = line
                line = ''

                # for brace counting we need to check basically every character
                for (idx, char) in enumerate(temp):
                    if char == '{':
                        brace_counter += 1

                        # do not count the first brace of a section
                        if brace_counter == 1:
                            continue

                    elif char == '}':
                        brace_counter -= 1
                        if brace_counter == 0:
                            need_counting = False
                            commit(sources, active_sections, line)
                            active_sections = self.all_valid_source_types
                            lines[itr - 1] = line[idx + 1:]
                            itr -= 1

                            continue
                    line += char

            # add the remaining part to the active sources
            commit(sources, active_sections, line.strip())

        res = []

        vprint1("[Splitter] Discarding sections I do not need")
        for keeper in to_keep:
            if sources[keeper] == '':
                continue
            meta = deepcopy(meta_information)
            meta['location'] = meta.get('location', 'error') + keeper

            dict_str_list_append(meta, 'extra_defines', to_define(keeper))

            res += [(sources[keeper], meta)]

        return res
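commit and to_define are helpers defined elsewhere; plausible sketches consistent with how the splitter calls them (the bodies, and the '.vert'-style extensions, are assumptions):

def commit(sources, active_sections, line):
    # append the line to every section that is currently active
    for section in active_sections:
        sources[section] += line + '\n'

def to_define(extension):
    # hypothetical mapping, e.g. '.vert' -> 'VERT', used to tag each split source
    return extension.lstrip('.').upper()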
Example no. 12
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:

        vprint1("[Include] Rewriter Started")

        already_included = meta_information.get('already_included',
                                                '').split(',')
        lines = source.splitlines()

        reconstruction = ''

        for line in lines:
            match = self.rgx.match(line)
            if match is not None:
                location = match.group(1)

                file_found = False
                vprint2(
                    f"[Include] Searching for {location} in {self.include_dirs}"
                )

                for include_dir in self.include_dirs:

                    vprint1(
                        f"[Include] Searching for {location} in {include_dir}")
                    test = os.path.join(include_dir, location)
                    if os.path.isfile(test) and os.path.exists(test):
                        vprint1("[Include] " + test)
                        if test not in already_included:

                            vprint1(f"[Include] Candidate {test} is valid")
                            already_included.append(test)
                            meta_information['already_included'] = ','.join(
                                already_included)
                            with open(test) as include:
                                vprint2("[Include] Candidate opened")
                                contents = include.read()
                                marker = sha1mark(contents, test)
                                [(src, _)] = self.rewrite_source(
                                    contents,
                                    merge_dicts(meta_information,
                                                {'location': test}))

                                vprint2("[Include] candidate parsed!")
                                reconstruction += marker + '\n' + src + '\n'
                                file_found = True
                        else:
                            vprint1(
                                "[Include] Candidate was ignored because it was already included"
                            )
                            file_found = True
                        break

                if not file_found:
                    raise FileNotFoundError(
                        f"FATAL: The include {location} was not found ")

            else:
                reconstruction += line + '\n'

        return [(reconstruction, meta_information)]
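sha1mark is also defined elsewhere (its markers pair with the AddShaMarker stage in the pipeline); a plausible sketch, assuming a GLSL line comment as the marker format:

import hashlib

def sha1mark(contents: str, location: str) -> str:
    # hypothetical format: tag the included source with its origin and content hash
    digest = hashlib.sha1(contents.encode('utf-8')).hexdigest()
    return f"// {location} sha1:{digest}"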
Example no. 13
    def rewrite_source(
            self, source: str,
            meta_information: Dict[str,
                                   str]) -> List[Tuple[str, Dict[str, str]]]:
        vprint1("[Variants] Rewriter started!")

        lines = source.splitlines(keepends=True)

        # extract the info we need
        base_name = meta_information.get('location').rsplit('.', maxsplit=1)

        # recreations
        sources = {'default': ""}
        active_variants = None
        delete_next_curly_open = False
        bc = 0

        for line in lines:
            # check if variant keyword was mentioned
            matches = self.matcher.match(line)
            if matches is not None:
                active_variants = [
                    v.strip().replace(' ', '_')
                    for v in matches.group(1).split(',')
                ]
                vprint2("[Variants] New Active Variants: ", active_variants)
                for v in active_variants:
                    if v not in sources:
                        sources[v] = sources['default']

                if '{' not in line:
                    delete_next_curly_open = True
                else:
                    bc = 1

                # this should not end up in the shaders
                continue

            if delete_next_curly_open and line.count('{') != 0:
                delete_next_curly_open = False
                line = line.replace('{', '', 1).strip()
                bc = 1

            if active_variants is None:
                vprint2("[Variants] Without active variant")
                vprint2(line)
                new_dict = {}
                for k, v in sources.items():
                    new_dict[k] = v + line
                sources = new_dict
            else:
                vprint2(line)
                vprint2("[Variants] Adding something to ", active_variants)
                bc += line.count('{')
                tmp = bc

                bc -= line.count('}')
                if bc <= 0:

                    closing_for_active_variants = '}' * (bc + tmp - 1)
                    closing_for_everyone = '}' * -bc
                    new_dict = {}

                    for k, v in sources.items():
                        new_dict[k] = v + closing_for_everyone
                        if k in active_variants:
                            new_dict[k] += closing_for_active_variants

                    sources = new_dict

                    active_variants = None
                    continue

                for variant in active_variants:
                    if variant in sources:
                        sources[variant] += line
                    else:
                        sources[variant] = line

        table = []

        vprint2("[Variants] sources: ", sources)

        for k, v in sources.items():
            if k == "default":
                table += [(k + '\n' + v, meta_information)]
            else:
                meta = deepcopy(meta_information)
                meta['location'] = base_name[0] + "." + k + "." + base_name[1]
                table += [(k + '\n' + v, meta)]

        return table
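For reference, how the per-variant locations are rebuilt from the original one, with hypothetical names:

base_name = 'blur.frag'.rsplit('.', maxsplit=1)                   # ['blur', 'frag']
print(base_name[0] + '.' + 'high_quality' + '.' + base_name[1])   # blur.high_quality.frag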