Example #1
 def __init__(self, compiler, document_root_dir_path, generator, include_dir_paths):
     object.__init__(self)
     self.__compiler = compiler
     self.__document_root_dir_path = document_root_dir_path
     self.__generator = generator
     self.__include_dir_paths = include_dir_paths
     self.__logger = logging.getLogger(class_qname(Compiler))
     self.__scope_stack = []
     self.__type_by_thrift_qname_cache = {}
     self.__used_include_abspaths = {}
     self.__visited_includes = []
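A note on the helper used throughout these examples: class_qname itself is not shown on this page. A minimal sketch of what such a helper presumably does, assuming it accepts either a class or an instance and returns the module-qualified class name:

import inspect

def class_qname(class_or_instance):
    # Hypothetical sketch, not the project's actual implementation:
    # resolve an instance to its class, then build "module.ClassName".
    if inspect.isclass(class_or_instance):
        cls = class_or_instance
    else:
        cls = class_or_instance.__class__
    return cls.__module__ + '.' + cls.__name__

Under that assumption, class_qname(self) in the stub methods below yields a fully qualified class name, so each NotImplementedError message identifies exactly which subclass failed to override the method.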
Example #2
 def _java_compare_to(self, this_value, other_value, depth):
     raise NotImplementedError(class_qname(self) + '._java_compare_to')
Example #3
 def js_schema(self, *args, **kwds):
     raise NotImplementedError(class_qname(self) + '.js_schema')
Example #4
 def js_default_value(self, *args, **kwds):
     raise NotImplementedError(class_qname(self) + '.js_default_value')
Example #5
 def __init__(self):
     GenericParser.__init__(self, start='document')
     self.__logger = logging.getLogger(class_qname(self))
Example #6
 def _save_to_dir(self, out_dir_path):
     raise NotImplementedError(class_qname(self))
Example #7
 def _java_mutable_implementation_qname(self):
     raise NotImplementedError(class_qname(self))
Example #8
 def py_qname(self, name=None, **kwds):
     raise NotImplementedError(class_qname(self) + '.py_qname')
Example #9
 def java_is_reference(self):
     raise NotImplementedError(class_qname(self) + '.java_is_reference')
Example #10
 def java_faker(self, **kwds):
     raise NotImplementedError(class_qname(self) + '.java_faker')
Example #11
 def java_compare_to(self, this_value, other_value):
     if self.java_is_reference():
         return "%(this_value)s.compareTo(%(other_value)s)" % locals()
     else:
         raise NotImplementedError(class_qname(self) + '.java_compare_to')
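Example #11 delegates to compareTo only for Java reference types; value types must override the method. A hypothetical override for a type that maps to a Java primitive int (the class context and emitted expression are assumptions, not taken from the source) could use the boxed type's static helper instead:

 def java_compare_to(self, this_value, other_value):
     # Hypothetical primitive override: Java primitives have no compareTo(),
     # so emit java.lang.Integer.compare(a, b) instead.
     return "Integer.compare(%(this_value)s, %(other_value)s)" % locals()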
Example #12
 def java_default_value(self):
     raise NotImplementedError(class_qname(self) + '.java_default_value')
Example #13
 def __repr__(self):
     raise NotImplementedError(class_qname(self) + ".__repr__")
Example #14
 def cpp_repr(self):
     raise NotImplementedError(class_qname(self) + '.cpp_repr')
Example #15
 def py_check(self, value):
     raise NotImplementedError(class_qname(self) + '.py_check')
Example #16
 def py_write_protocol(self, value, depth=0):
     raise NotImplementedError(class_qname(self) + '.py_write_protocol')
Example #17
 def java_literal(self, value):
     raise NotImplementedError(class_qname(self) + '.java_literal')
Example #18
 def _py_imports_definition(self, caller_stack):
     raise NotImplementedError(
         class_qname(self) + '._py_imports_definition')
Example #19
 def java_to_string(self, value):
     raise NotImplementedError(class_qname(self) + '.java_to_string')
Example #20
 def _logger(self):
     try:
         return self.__logger
     except AttributeError:
         self.__logger = logging.getLogger(class_qname(LintGenerator))
         return self.__logger
Example #21
 def js_check(self, *args, **kwds):
     raise NotImplementedError(class_qname(self) + '.js_check')
Example #22
 def _save_to_dir(self, out_dir_path):
     raise NotImplementedError(class_qname(self))
Example #23
 def _py_imports_definition(self, caller_stack):
     raise NotImplementedError(class_qname(self) + '._py_imports_definition')
Example #24
 def _logger(self):
     if self.__logger is None:
         self.__logger = logging.getLogger(class_qname(self))
     return self.__logger
Example #25
 def _py_imports_use(self, caller_stack):
     raise NotImplementedError(class_qname(self) + '._py_imports_use')
Example #26
 def js_is_model(self, *args, **kwds):
     raise NotImplementedError(class_qname(self) + '.js_is_model')
Example #27
    def tokenize(self, input_):
        if isinstance(input_, file):
            # Capture the filename before read() replaces input_ with its contents.
            input_filename = input_.name
            input_ = input_.read()
        elif isinstance(input_, str):
            if os.path.exists(input_):
                with open(input_, 'rb') as input_file:
                    input_ = input_file.read()
                    input_filename = input_file.name
            else:
                input_filename = '<string>'
        else:
            raise TypeError(type(input_))

        input_lines = input_.splitlines()

        logger = logging.getLogger(class_qname(self))
        logger.debug("tokenizing " + input_filename)

        output = []

        offset = 0
        input_len = len(input_)
        patterns = self.__PATTERNS
        while offset < input_len:
            for pattern in patterns:
                match = pattern[0].match(input_, offset)
                if match is None:
                    continue

                colno = match.start(1)
                for lineno, line in enumerate(input_lines):
                    if len(line) < colno:
                        colno -= len(line)
                    else:
                        break

                token_offset = match.start(1)
                token_text = match.group(1)
                token_type = pattern[1]
                if token_type == Token.Type.IDENTIFIER:
                    if token_text.islower():
                        try:
                            token_type = getattr(Token.Type, 'KEYWORD_' + token_text.upper())
                        except AttributeError:
                            pass
                token = \
                    Token(
                        colno=colno,
                        index=len(output),
                        input_=input_,
                        input_filename=input_filename,
                        lineno=lineno,
                        offset=token_offset,
                        text=token_text,
                        type_=token_type
                    )
                logger.debug(repr(token))
                output.append(token)

                offset = match.end()

                break

            if match is None:
                colno = offset
                for lineno, line in enumerate(input_lines):
                    if len(line) < colno:
                        colno -= len(line)
                    else:
                        break
                raise ScanException(colno=colno, filename=input_filename, lineno=lineno, offset=offset, text=input_)

        output.append(
            Token(
                colno=0,
                index=len(output),
                input_=input_,
                input_filename=input_filename,
                lineno=len(input_lines),
                offset=len(input_),
                text='',
                type_=Token.Type.EOF
            )
        )

        return output
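The identifier-to-keyword promotion inside the loop above relies on getattr raising AttributeError when Token.Type defines no matching KEYWORD_* constant. A self-contained illustration of that pattern, using a stand-in enum rather than the real Token.Type:

class _TokenType(object):
    # Stand-in constants for illustration only.
    IDENTIFIER = 'IDENTIFIER'
    KEYWORD_STRUCT = 'KEYWORD_STRUCT'

def promote_keyword(text, default=_TokenType.IDENTIFIER):
    # Lowercase identifiers that name a KEYWORD_* constant become keywords;
    # everything else keeps the default type.
    if text.islower():
        try:
            return getattr(_TokenType, 'KEYWORD_' + text.upper())
        except AttributeError:
            pass
    return default

assert promote_keyword('struct') == _TokenType.KEYWORD_STRUCT
assert promote_keyword('MyStruct') == _TokenType.IDENTIFIER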
Example #28
 def js_validation(self, *args, **kwds):
     raise NotImplementedError(class_qname(self) + '.js_validation')
Example #29
 def _java_mutable_implementation_qname(self):
     raise NotImplementedError(class_qname(self))
Example #30
 def sql_repr(self):
     raise NotImplementedError(class_qname(self) + '.sql_repr')
Example #31
 def java_boxed_name(self):
     raise NotImplementedError(class_qname(self) + '.java_boxed_name')
Example #32
 def py_read_protocol(self):
     raise NotImplementedError(class_qname(self) + '.py_read_protocol')
Example #33
 def __init__(self):
     GenericParser.__init__(self, start='document')
     self.__logger = logging.getLogger(class_qname(self))
Example #34
 def py_name(self):
     raise NotImplementedError(class_qname(self) + '.py_name')
Example #35
 def thrift_ttype_name(self):
     raise NotImplementedError(class_qname(self) + '.thrift_ttype_name')
Example #36
    def tokenize(self, input_):
        if isinstance(input_, file):
            # Capture the filename before read() replaces input_ with its contents.
            input_filename = input_.name
            input_ = input_.read()
        elif isinstance(input_, str):
            if os.path.exists(input_):
                with open(input_, 'rb') as input_file:
                    input_ = input_file.read()
                    input_filename = input_file.name
            else:
                input_filename = '<string>'
        else:
            raise TypeError(type(input_))

        input_lines = input_.splitlines()

        logger = logging.getLogger(class_qname(self))
        logger.debug("tokenizing " + input_filename)

        output = []

        offset = 0
        input_len = len(input_)
        patterns = self.__PATTERNS
        while offset < input_len:
            for pattern in patterns:
                match = pattern[0].match(input_, offset)
                if match is None:
                    continue

                colno = match.start(1)
                for lineno, line in enumerate(input_lines):
                    if len(line) < colno:
                        colno -= len(line)
                    else:
                        break

                token_offset = match.start(1)
                token_text = match.group(1)
                token_type = pattern[1]
                if token_type == Token.Type.IDENTIFIER:
                    if token_text.islower():
                        try:
                            token_type = getattr(Token.Type, 'KEYWORD_' + token_text.upper())
                        except AttributeError:
                            pass
                token = \
                    Token(
                        colno=colno,
                        index=len(output),
                        input_=input_,
                        input_filename=input_filename,
                        lineno=lineno,
                        offset=token_offset,
                        text=token_text,
                        type_=token_type
                    )
                logger.debug(repr(token))
                output.append(token)

                offset = match.end()

                break

            if match is None:
                colno = offset
                for lineno, line in enumerate(input_lines):
                    if len(line) < colno:
                        colno -= len(line)
                    else:
                        break
                raise ScanException(colno=colno, filename=input_filename, lineno=lineno, offset=offset, text=input_)

        output.append(
            Token(
                colno=0,
                index=len(output),
                input_=input_,
                input_filename=input_filename,
                lineno=len(input_lines),
                offset=len(input_),
                text='',
                type_=Token.Type.EOF
            )
        )

        return output
Example #37
 def ts_repr(self):
     raise NotImplementedError(class_qname(self) + ".ts_repr")
Example #38
 def _py_imports_use(self, caller_stack):
     raise NotImplementedError(class_qname(self) + '._py_imports_use')
Example #39
 def java_repr(self):
     raise NotImplementedError(class_qname(self) + '.java_repr')
Example #40
 def py_repr(self):
     raise NotImplementedError(class_qname(self) + '.py_repr')
Example #41
 def dart_repr(self):
     raise NotImplementedError(class_qname(self) + '.dart_repr')