Example #1
0
def wrap_lists(ast):
    """
    Wraps consecutive listitem tokens in a list token. Applied recursively.

    :param ast: The tree to be modified
    :return: The modified tree
    """
    new_ast = []
    buffer = []
    for token in ast:
        if hasattr(token, 'definition'):
            token.definition = wrap_lists(token.definition)
        if hasattr(token, 'name') and 'listitem' in token.name:
            buffer.append(token)
        else:
            if buffer:
                new_ast.append(util.Map({
                    'name': 'list',
                    'definition': buffer
                }))
                buffer = []
            new_ast.append(token)
    if buffer:
        new_ast.append(util.Map({'name': 'list', 'definition': buffer}))
    return new_ast
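A minimal usage sketch (not from the source); it assumes util.Map exposes its keys as attributes, which is how the tokens above are read, and the token names and contents are made up:

tokens = [
    util.Map({'name': 'listitem', 'definition': ['first item']}),
    util.Map({'name': 'listitem', 'definition': ['second item']}),
    util.Map({'name': 'par', 'definition': ['a plain paragraph']})
]
wrapped = wrap_lists(tokens)
# Expected shape: the two consecutive listitem tokens end up inside a single
# 'list' token, followed by the untouched 'par' token.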
Example #2
0
def parse():
    """
    Converts mixed files from source into intermediates based on the extension of the template file. Once converted,
    each intermediate is run through the default filters and the format's parser processors.

    :return: A list of intermediates corresponding to each of the mixed files
    """
    intermediates, parser = [], get_format(options.state.template())
    if not parser['format']:
        raise FormatError('This format is export only!')
    # Loop through all weaved files
    for file in fileutil.with_extension('.tex'):
        options.post('Using ' + parser['name'] + ' format to parse ' + file)
        intermediate = util.Map({
            'ast': [],
            'src': parser['parser_preprocessor'](fileutil.read(file)),
            'fmt': parser,
            'name': file.replace('.tex', '')
        })
        intermediate.ast = parse_tokens(intermediate.src, parser)
        for default_filter in config.default_filters:
            intermediate.ast = default_filter(intermediate.ast)
        intermediate = parser['parser_postprocessor'](intermediate)
        intermediates.append(intermediate)
        fileutil.write(options.state.cwd() + '/parsed-ast', ''.join(str_token(intermediate.ast)))
    options.post('Successfully parsed ' + parser['name'] + '.')
    return intermediates
Example #3
0
def add_option(name, flag, description, default, type_, value=None):
    """
    Add an option.

    :param name: The name of the option
    :param flag: The flag for the option
    :param description: A description of the option
    :param default: The default value of the option
    :param type_: The type of the option
    :param value: A value, defaults to None
    :return: The value of the option after parsing any hanging options
    """
    option = util.Map({
        'name': name,
        'flag': flag,
        'description': description,
        'default': default,
        'type_': type_,
        'value': value
    })
    state.update({name: functools.partial(compile_, option)})
    _compiled.update({flag: option, '-' + name: option, '--' + name: option})
    _unique.update({name: option})
    # Load any hanging options
    load_options([])
    return state[name]()
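A hedged usage sketch; the option name, flag, description, and default below are invented:

# Register a hypothetical 'number' option and read back its parsed value.
number = add_option('number', '-n', 'Number of versions to generate', 1, int)
# Other modules can later read the same option through the shared state Map.
same = state['number']()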
Example #4
0
def clear():
    """
    Clear all compiled, unique, and hanging options.
    """
    global state, _compiled, _unique, _hanging
    state = util.Map()
    _compiled = {}
    _unique = {}
    _hanging = []
Example #5
0
def def_prompt(token):
    """
    Wraps the leading untagged, math, image, and verbatim tokens of a question
    definition in a prompt token, keeping the title as the first entry.

    :param token: The question token to modify
    :return: The modified token
    """
    try:
        definition = token.definition[0].definition
        title, prompt = definition.pop(0), []
        while len(definition) > 0 and (not hasattr(definition[0], 'name')
                                       or definition[0].name in ['$', 'img', 'verbatim']):
            prompt.append(definition.pop(0))
        definition.insert(0, util.Map({'name': 'prompt', 'definition': prompt}))
        definition.insert(0, title)
        return token
    except AttributeError:
        raise parser_composer.FormatError('Malformed question token definition: ' + str(token))
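An illustrative before/after with hypothetical data; the 'body' and 'choices' token names are made up, and only the prompt-wrapping behaviour mirrors the code above:

question = util.Map({'name': 'question', 'definition': [
    util.Map({'name': 'body', 'definition': [
        'Addition',                                       # title, popped first
        'What is 2 + 2?',                                 # untagged text -> prompt
        util.Map({'name': 'choices', 'definition': []})   # first non-prompt token
    ]})
]})
def_prompt(question)
# The inner definition now reads: ['Addition', prompt(['What is 2 + 2?']), choices]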
Example #6
0
def add_format(name,
               extensions,
               format,
               description='',
               parser_preprocessor=lambda _: _,
               parser_postprocessor=lambda _: _,
               composer_postprocessor=lambda _: _,
               composer_preprocessor=lambda _: _,
               left_paren=None,
               right_paren=None
):
    """
    Add a format to the formatter.

    :param name: The name of the format
    :param extensions: A list of extensions related to the format
    :param description: A description of the format
    :param format: The token list for the format
    :param parser_preprocessor: The parser preprocessor
    :param parser_postprocessor: The parser postprocessor
    :param composer_postprocessor: The composer postprocessor
    :param composer_preprocessor: The composer preprocessor
    :param left_paren: The left parenthesis string used when nesting content, if any
    :param right_paren: The right parenthesis string used when nesting content, if any
    """
    format.update(collections.OrderedDict([
        ('pyxamnumerical', [':pyxam.numerical', '.']),
        ('pyxamcalculated', [':pyxam.calculated', '.'])
    ]))
    fmt = {
        'name': name,
        'extensions': extensions + [name],
        'description': description,
        'parser_preprocessor': parser_preprocessor,
        'parser_postprocessor': parser_postprocessor,
        'composer_preprocessor': composer_preprocessor,
        'composer_postprocessor': composer_postprocessor,
        'left_paren': left_paren,
        'right_paren': right_paren,
        'format': format
    }
    formats.update(dict((extension, fmt) for extension in fmt['extensions']))
    fmt['format'] = collections.OrderedDict([
        (name, util.Map({'name': name, 'definition': definition})) for name, definition in fmt['format'].items()
    ])
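A hedged registration sketch; the format name, extension, and token definitions are invented and only meant to show the call shape, not a real grammar:

add_format(
    name='demo',
    extensions=['dem'],
    description='A toy format used only for illustration',
    format=collections.OrderedDict([
        # token name -> definition list; strings are literals, the empty tuple
        # marks packed content, and the final string is the ending pattern
        # checked by build_token
        ('title', ['\\title{', (), '}', '.']),
        ('question', ['\\question{', (), '}', '.'])
    ])
)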
Example #7
0
def get_map(self):
    """
    Serialize the current game state (size, tick, snakes, and obstacles) into a util.Map.

    :return: A util.Map describing the current game map
    """
    game_map = {}
    game_map["width"] = self.size[0]
    game_map["height"] = self.size[1]
    game_map["worldTick"] = self.tick
    # Generate map information about snakes
    snake_infos = []
    for snake in self.snakes:
        snake_info = {}
        snake_info["points"] = snake.score
        snake_positions = []
        for position in snake.positions:
            # Todo this depends on how num is calculated in the real game
            snake_positions.append(self.convert_position(position))
        snake_info["positions"] = snake_positions
        snake_infos.append(snake_info)
    game_map["snakeInfos"] = snake_infos
    # Generate map information about obstacles
    obstacles = []
    for obstacle in self.obstacles:
        obstacles.append(self.convert_position(obstacle.position))
    game_map["obstacles"] = obstacles
    print(game_map)
    return util.Map(game_map)
Example #8
0
def _map_update(self, msg):
    """
    Handle a map update message by asking the snake for its next move and sending it back.
    """
    print(msg['map'])
    direction = self.snake.get_next_move(util.Map(msg['map']))
    self._send(messages.register_move(str(direction), msg))
Example #9
0
def build_token(token, src, fmt):
    """
    Attempts to convert the source into a token.

    :param token: The token to build
    :param src: The source to build from
    :param fmt: The format to build to
    :return: The token if built or none
    """
    definition, unmatched, packing = [], src, False
    for symbol in token.definition[:-1]:
        if packing:
            matched, parentheses = '', 0
            while packing:
                # No match
                if not unmatched:
                    return None, src
                # If the symbol lists sub-tokens, the parentheses level does not matter
                elif isinstance(symbol, list):
                    for sub_token in symbol:
                        child, unmatched = build_token(fmt['format'][sub_token], unmatched, fmt)
                        if child is not None:
                            definition += parse_tokens(matched, fmt)
                            definition.append(child)
                            packing = False
                            break
                    else:
                        matched, unmatched = increment(matched, unmatched)
                # Move down a parentheses level
                elif fmt['left_paren'] is not None and unmatched.startswith(fmt['left_paren']):
                    parentheses += 1
                    matched, unmatched = increment(matched, unmatched)
                # If nested move back up a parentheses level
                elif parentheses != 0 and unmatched.startswith(fmt['right_paren']):
                    parentheses -= 1
                    matched, unmatched = increment(matched, unmatched)
                # If parentheses are not balanced consume character
                elif parentheses != 0:
                    matched, unmatched = increment(matched, unmatched)
                # If at the end of content
                elif isinstance(symbol, str) and unmatched.startswith(symbol):
                    definition += [matched] if filters.has_name(token, ['$', 'verb', 'comment']) else parse_tokens(matched, fmt)
                    unmatched = unmatched[len(symbol):]
                    packing = False
                # If not at the end of content
                elif isinstance(symbol, str):
                    matched, unmatched = increment(matched, unmatched)
                # No match
                else:
                    return None, src
        # If str
        elif isinstance(symbol, str) and unmatched.startswith(symbol):
            unmatched = unmatched[len(symbol):]
        # If token
        elif isinstance(symbol, list):
            for sub_token in symbol:
                child, unmatched = build_token(fmt['format'][sub_token], unmatched, fmt)
                if child is not None:
                    definition.append(child)
                    break
            else:
                return None, src
        # If content
        elif isinstance(symbol, tuple):
            packing = True
        # No match
        else:
            return None, src
    # If the symbol loop ended while still packing content
    if packing:
        matched, parentheses = '', 0
        while packing:
            # End of string
            if len(unmatched) == 0:
                definition += [matched] if filters.has_name(token, ['$', 'verb', 'comment']) else parse_tokens(matched, fmt)
                packing = False
            # Move down a parentheses level
            elif fmt['left_paren'] is not None and unmatched.startswith(fmt['left_paren']):
                parentheses += 1
                matched, unmatched = increment(matched, unmatched)
            # If nested move back up a parentheses level
            elif parentheses != 0 and unmatched.startswith(fmt['right_paren']):
                parentheses -= 1
                matched, unmatched = increment(matched, unmatched)
            # If parentheses are not balanced consume character
            elif parentheses != 0:
                matched, unmatched = increment(matched, unmatched)
            # If at the end of content
            elif re.match(r'^\s*(({})|$).*'.format(token.definition[-1]), unmatched, re.DOTALL):
                definition += [matched] if filters.has_name(token, ['$', 'verb', 'comment']) else parse_tokens(matched, fmt)
                packing = False
            # If not at the end of content
            else:
                matched, unmatched = increment(matched, unmatched)
    # Check if ending regex matches
    if unmatched == '' or re.match(r'^\s*(({})|$).*'.format(token.definition[-1]), unmatched, re.DOTALL):
        return util.Map({'name': token.name, 'definition': definition}), unmatched
    return None, src
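To make the symbol handling concrete, a hedged sketch of a single token definition; the token and sub-token names are invented, and only the three symbol kinds plus the trailing pattern mirror the checks above:

example_token = util.Map({
    'name': 'question',
    'definition': [
        '\\question{',            # str: a literal the source must start with
        (),                       # tuple: packed content, consumed until the next
                                  # symbol or the ending pattern matches, honouring
                                  # left_paren/right_paren nesting
        '}',                      # str: literal that closes the packed content
        ['choices', 'solution'],  # list: alternative sub-token names, each tried
                                  # via a recursive build_token call
        '.'                       # ending pattern checked with re.match
    ]
})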
Example #10
0
"""
This module handles the command line arguments for Pyxam which act as global state for the entire program.
"""
import util
import functools


class OptionError(Exception):
    """
    Error wrapper for Exceptions that occur within the options module.
    """
    pass


# A Map of all current compile functions
state = util.Map()

# A dict of all compiled options, keyed by flag and prefixed name
_compiled = {}

# A dict of all unique options, keyed by name
_unique = {}

# A list of yet to be compiled options
_hanging = []


def clear():
    """
    Clear all compiled, unique, and hanging options.
    """