Example #1
    def readStyle(self, ssname):
        # If callables are used, they should probably be subclassed
        # strings, or something else that will print nicely for errors
        if callable(ssname):
            return ssname()

        fname = self.findStyle(ssname)
        if fname:
            try:
                with open(fname) as f:
                    return rson_loads(f.read())
            except ValueError as e:  # Error parsing the JSON data
                log.critical('Error parsing stylesheet "%s": %s' %
                             (fname, str(e)))
            except IOError as e:  # Error opening the ssheet
                log.critical('Error opening stylesheet "%s": %s' %
                             (fname, str(e)))
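
The comment in readStyle() suggests that callable style arguments should be string subclasses so they still print nicely in error messages. A minimal sketch of that idea (the class name and attributes below are illustrative, not part of the original code):

    class CallableStyle(str):
        """Stylesheet name that also carries its parsed style data."""

        def __new__(cls, name, data):
            self = super().__new__(cls, name)
            self._data = data
            return self

        def __call__(self):
            # readStyle() short-circuits on callables and returns this value.
            return self._data

    inline = CallableStyle('inline-styles', {'styles': {'normal': {'fontSize': 10}}})
    print(inline)    # prints "inline-styles", so error messages stay readable
    print(inline())  # -> {'styles': {'normal': {'fontSize': 10}}}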
Example #2
    def handle_style(self, chunk):
        ''' Parse through the source until we find lines that are no longer indented,
            then pass our indented lines to the RSON parser.
        '''
        self.changed = True
        if chunk:
            log.error(".. style:: does not recognize string %s" % repr(chunk))
            return

        mystyles = '\n'.join(self.read_indented())
        if not mystyles:
            log.error("Empty .. style:: block found")
        try:
            styles = rson_loads(mystyles)
        except ValueError as e:  # Error parsing the JSON data
            log.critical('Error parsing stylesheet "%s": %s' %
                         (mystyles, str(e)))
        else:
            self.styles.setdefault('styles', {}).update(styles)
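
The docstring notes that the indented block is handed to the RSON parser. RSON is designed as a superset of JSON, so a plain-JSON block is enough to illustrate the input handle_style() expects and the dict it merges into self.styles['styles']; json.loads is used below only as a stand-in, since the import behind rson_loads is not shown in these snippets.

    import json

    # Stand-in for rson_loads(); the real parser also accepts relaxed RSON syntax.
    mystyles = '\n'.join([
        '{',
        '    "heading1": {"fontSize": 18, "spaceAfter": 6},',
        '    "normal": {"fontSize": 10}',
        '}',
    ])
    styles = json.loads(mystyles)
    print(styles['heading1']['fontSize'])  # -> 18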
Example #3
    def __call__(self):
        return rson_loads(self.value)
Example #4
    def __init__(self, sourcef, incfile=False, widthcount=0):
        """
        Process a file and decorate the resultant Preprocess instance with
        self.result (the preprocessed file) and self.styles (extracted stylesheet
        information) for the caller.
        """
        self.widthcount = widthcount

        name = sourcef.name
        source = sourcef.read().replace('\r\n', '\n').replace('\r', '\n')

        # Determine whether an include file is a stylesheet or another
        # reStructuredText file, and handle stylesheets appropriately.
        if incfile:
            try:
                self.styles = styles = rson_loads(source)
                substyles = styles.get('styles')
                if substyles is not None:
                    styles['styles'] = dict(substyles)
            except Exception:
                # Not parseable as a stylesheet; fall through and treat it as reST
                pass
            else:
                self.changed = True
                self.keep = False
                return

        # Read the whole file and wrap it in a DummyFile
        self.sourcef = DummyFile(source)
        self.sourcef.name = name

        # Use a regular expression on the source, to take it apart
        # and put it back together again.
        self.source = source = [x for x in self.splitter(source) if x]
        self.result = result = []
        self.styles = {}
        self.changed = False

        # More efficient to pop() a list than to keep taking tokens from [0]
        source.reverse()
        isblank = False
        keywords = self.keywords
        handle_single = keywords['single::']
        while source:
            wasblank = isblank
            isblank = False
            chunk = source.pop()
            result.append(chunk)

            # Only process single lines
            if not chunk.endswith('\n'):
                continue
            result[-1] = chunk[:-1]
            if chunk.index('\n') != len(chunk) - 1:
                continue

            # Parse the line to look for one of our keywords.
            tokens = chunk.split()
            isblank = not tokens
            if len(tokens) >= 2 and tokens[0] == '..' and tokens[1].endswith('::'):
                func = keywords.get(tokens[1])
                if func is None:
                    continue
                chunk = chunk.split('::', 1)[1]
            elif wasblank and len(tokens) == 1 and chunk[0].isalpha() and tokens[0].isalpha():
                func = handle_single
                chunk = tokens[0]
            else:
                continue

            result.pop()
            func(self, chunk.strip())

        # Determine if we actually did anything or not.  Just use our source file
        # if not.  Otherwise, write the results to disk (so the user can use them
        # for debugging) and return them.
        if self.changed:
            result.append('')
            result = DummyFile('\n'.join(result))
            result.name = name + '.build_temp'
            self.keep = keep = len(result.strip())
            if keep:
                with open(result.name, 'w') as f:
                    f.write(result)
            self.result = result
        else:
            self.result = self.sourcef
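
The keyword dispatch in the loop above is easier to follow with concrete input. A small, self-contained illustration (not part of the original code) of how a directive line is recognized and what ends up being passed to the handler:

    # A bare '.. style::' line is dispatched with an empty chunk, so
    # handle_style() goes on to read the indented block that follows.
    chunk = '.. style::\n'
    tokens = chunk.split()
    assert tokens[0] == '..' and tokens[1].endswith('::')
    print(repr(chunk.split('::', 1)[1].strip()))  # -> ''

    # Trailing text on the directive line is rejected by handle_style().
    chunk = '.. style:: extra-text\n'
    print(repr(chunk.split('::', 1)[1].strip()))  # -> 'extra-text'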