def PARSE(self, object_type=None, object_instance=None):
    """Append C++ code that parses an HTTP message and casts it to object_type.

    object_type defaults to self.http_message_type; object_instance defaults
    to its decamelized (snake_case) form. The parser variable name matches
    the one declared by PARSER().
    """
    if object_type is None:
        object_type = self.http_message_type
    if object_instance is None:
        object_instance = decamelize(object_type)
    http_message_type_lower = decamelize(self.http_message_type)
    # %(...)s placeholders are filled from the locals bound above.
    self.append("""%(object_type)s* %(object_instance)s = Object::cast<%(object_type)s>(%(http_message_type_lower)s_parser.parse());""" % locals())
def __java_markers(self):
    """Generate a Java "Markers" inner class exposing slf4j markers.

    Emits one marker constant per service function plus one for the service
    itself; a static initializer adds every function marker to the service
    marker.
    """
    # The marker's Java variable name and its runtime marker name are both
    # the decamelized, upper-cased service name.
    service_marker_variable_name = service_marker_name = decamelize(
        self.name).upper()
    add_function_markers = []
    function_markers = []
    for function in self.functions:
        function_marker_variable_name = function.java_marker_variable_name()
        function_markers.append(
            "public final static org.slf4j.Marker %s = org.slf4j.MarkerFactory.getMarker(\"%s\");"
            % (function_marker_variable_name, function.java_marker_name()))
        add_function_markers.append(
            "%(service_marker_variable_name)s.add(%(function_marker_variable_name)s);"
            % locals())
    add_function_markers = "\n".join(indent(' ' * 8, add_function_markers))
    function_markers = "\n".join(indent(' ' * 4, function_markers))
    return """\
public static class Markers {
%(function_markers)s

    public final static org.slf4j.Marker %(service_marker_variable_name)s = org.slf4j.MarkerFactory.getMarker("%(service_marker_name)s");

    static {
%(add_function_markers)s
    }
}""" % locals()
def elastic_search_mappings_dict(self):
    """Build the Elasticsearch mappings dict for this struct.

    The document type comes from an elastic_search_document_type annotation
    when present, otherwise from the decamelized struct name. Base mappings
    supplied by elastic_search_mappings_base annotations are merged in, with
    field-derived properties overriding any base properties of the same name.
    """
    document_type = next(
        (annotation.value
         for annotation in self.annotations
         if annotation.name == 'elastic_search_document_type'),
        None)
    if document_type is None:
        document_type = decamelize(self.name)

    properties = OrderedDict(
        (field.elastic_search_name(), field.elastic_search_mapping_dict())
        for field in self.fields)

    mapping = {
        '_all': {'enabled': False},
        'dynamic': 'strict',
    }
    for annotation in self.annotations:
        if annotation.name == 'elastic_search_mappings_base':
            mapping.update(annotation.value)

    if 'properties' in mapping:
        # Base properties first, then field-derived ones, so fields win.
        merged = OrderedDict(mapping['properties'])
        merged.update(properties)
        properties = merged
    mapping['properties'] = properties

    return {document_type: mapping}
def elastic_search_mappings_dict(self):
    """Return {document_type: mapping} Elasticsearch mappings for this struct.

    An elastic_search_document_type annotation overrides the default
    (decamelized struct name) document type; elastic_search_mappings_base
    annotations contribute base mapping entries, and per-field mappings
    override same-named base properties.
    """
    # Document type: first explicit annotation wins, else fall back to name.
    document_type = None
    for annotation in self.annotations:
        if annotation.name == 'elastic_search_document_type':
            document_type = annotation.value
            break
    if document_type is None:
        document_type = decamelize(self.name)

    field_properties = OrderedDict()
    for field in self.fields:
        field_properties[field.elastic_search_name()] = \
            field.elastic_search_mapping_dict()

    type_mapping = {'_all': {'enabled': False}, 'dynamic': 'strict'}
    for annotation in self.annotations:
        if annotation.name == 'elastic_search_mappings_base':
            type_mapping.update(annotation.value)

    if 'properties' in type_mapping:
        combined = OrderedDict(type_mapping['properties'])
        combined.update(field_properties)
        field_properties = combined
    type_mapping['properties'] = field_properties

    return {document_type: type_mapping}
def _py_method_update(self):
    """Return {'update': source} for a generated Python update() method.

    The generated method copies every field from another instance of the
    same class through the field setters, or, given a dict, dispatches each
    key to the corresponding set_<key> method. Returns {} when the struct
    has no fields.
    """
    if len(self.fields) == 0:
        return {}
    doc = indent(" " * 4,
                 "\n".join(field.py_sphinx_doc() for field in self.fields))
    name = self.py_name()
    # The parameter naming the "other" object is the decamelized class name.
    other_name = decamelize(self.py_name())
    object_updates = "\n".join(
        indent(
            " " * 8,
            (
                "self.%s(%s.%s)" % (field.py_setter_name(), other_name,
                                    field.py_getter_call())
                for field in self.fields
            ),
        )
    )
    return {
        "update": """\
def update(self, %(other_name)s):
    '''
%(doc)s
    '''

    if isinstance(%(other_name)s, %(name)s):
%(object_updates)s
    elif isinstance(%(other_name)s, dict):
        for key, value in %(other_name)s.iteritems():
            getattr(self, 'set_' + key)(value)
    else:
        raise TypeError(%(other_name)s)
    return self
""" % locals()
    }
def accept(self, visitor):
    """Dispatch to visitor.visit_<decamelized class name>(self).

    The computed visit method name is memoized per class name in
    _VISIT_METHOD_NAME_CACHE so decamelize runs once per class.
    """
    class_name = self.__class__.__name__
    cache = self._VISIT_METHOD_NAME_CACHE
    if class_name not in cache:
        cache[class_name] = 'visit_' + decamelize(class_name)
    return getattr(visitor, cache[class_name])(self)
def _save_to_dir(self, out_dir_path):
    """Save this document as a Python module under out_dir_path.

    The module goes into a directory tree derived from the document's Python
    namespace (dots become path separators). When the namespace matches the
    'py' scope namespace and a document root is known, a mismatch between
    the output layout and the .thrift file layout is logged as a warning.
    When the document declares exactly one definition the file is named
    after that definition (decamelized); otherwise after the document.

    Returns whatever _save_to_file returns (the written path).
    """
    root_out_dir_path = out_dir_path
    try:
        py_namespace = self._py_namespace()
        out_dir_path = os.path.join(out_dir_path,
                                    py_namespace.replace('.', os.path.sep))
        try:
            if py_namespace == self.namespace_by_scope('py').name \
                    and self.document_root_dir_path is not None:
                document_relpath = os.path.relpath(
                    os.path.dirname(self.path), self.document_root_dir_path)
                out_dir_relpath = os.path.relpath(out_dir_path,
                                                  root_out_dir_path)
                if out_dir_relpath != document_relpath:
                    # warning() rather than the deprecated warn() alias.
                    self._logger.warning(
                        "Python module %s (relative directory %s) does not match .thrift file path %s (relative directory %s)",
                        py_namespace, out_dir_relpath, self.path,
                        document_relpath)
        except KeyError:
            # No 'py' scope namespace declared; skip the layout check.
            pass
    except KeyError:
        # Document declares no Python namespace; write directly under
        # out_dir_path.
        pass
    if len(self.definitions) == 1:
        out_file_name = decamelize(self.definitions[0].py_name()) + '.py'
    else:
        out_file_name = self.name + '.py'
    return self._save_to_file(os.path.join(out_dir_path, out_file_name))
def sql_create_table(self):
    """Generate a CREATE TABLE statement for this struct.

    Column definitions come from sql_column annotations and from the
    struct's fields; foreign-key clauses come from sql_foreign_key
    annotations (each of which must immediately follow the sql_column it
    constrains) and from the fields. Foreign-key clauses are emitted after
    all columns; an auto-increment id primary key is always emitted first.

    Raises ValueError when a sql_foreign_key annotation does not directly
    follow a sql_column annotation.
    """
    column_definitions = []
    foreign_key_definitions = []
    for annotation_i, annotation in enumerate(self.annotations):
        if annotation.name == 'sql_column':
            column_definitions.append(annotation.value)
        elif annotation.name == 'sql_foreign_key':
            if annotation_i == 0:
                raise ValueError('sql_foreign_key annotation on a struct must follow a sql_column annotation')
            elif self.annotations[annotation_i - 1].name != 'sql_column':
                raise ValueError("sql_foreign_key annotation on a struct must follow a sql_column annotation, not " + self.annotations[annotation_i - 1].name)
            # The referencing column name is the first token of the
            # preceding sql_column annotation's value.
            foreign_key_definitions.append(SqlField.sql_foreign_key_definition_static(
                column_name=self.annotations[annotation_i - 1].value.split(' ', 1)[0],
                foreign_table_name=annotation.value[0],
                foreign_column_name=annotation.value[1]
            ))
    for field in self.fields:
        column_definition = field.sql_column_definition()
        if column_definition is not None:
            column_definitions.append(column_definition)
    for field in self.fields:
        foreign_key_definition = field.sql_foreign_key_definition()
        if foreign_key_definition is not None:
            foreign_key_definitions.append(foreign_key_definition)
    column_definitions.extend(foreign_key_definitions)
    column_definitions = lpad(",\n    ", ",\n    ".join(column_definitions))
    name = decamelize(self.name)
    return """\
CREATE TABLE IF NOT EXISTS %(name)s(
    id INTEGER PRIMARY KEY AUTO_INCREMENT NOT NULL%(column_definitions)s
)""" % locals()
def _py_method_update(self):
    """Return {'update': source} for a generated Python update() method.

    The generated method copies every field from another instance of the
    same class through the field setters, or, given a dict, dispatches each
    key to the corresponding set_<key> method. Returns {} when the struct
    has no fields. (This variant emits no docstring in the generated code.)
    """
    if len(self.fields) == 0:
        return {}
    name = self.py_name()
    # The parameter naming the "other" object is the decamelized class name.
    other_name = decamelize(self.py_name())
    object_updates = \
        "\n".join(indent(' ' * 8,
                         ("self.%s(%s.%s)" % (field.py_setter_name(),
                                              other_name,
                                              field.py_getter_call())
                          for field in self.fields)
                         ))
    return {
        'update': """\
def update(self, %(other_name)s):
    if isinstance(%(other_name)s, %(name)s):
%(object_updates)s
    elif isinstance(%(other_name)s, dict):
        for key, value in %(other_name)s.iteritems():
            getattr(self, 'set_' + key)(value)
    else:
        raise TypeError(%(other_name)s)
    return self
""" % locals()
    }
def accept(self, visitor):
    """Double-dispatch hook: invoke the matching visit_* method on visitor.

    The visit method name ("visit_" + decamelized class name) is cached per
    class name in _VISIT_METHOD_NAME_CACHE.
    """
    cache = self._VISIT_METHOD_NAME_CACHE
    class_name = self.__class__.__name__
    visit_method_name = cache.get(class_name)
    if visit_method_name is None:
        visit_method_name = "visit_" + decamelize(class_name)
        cache[class_name] = visit_method_name
    return getattr(visitor, visit_method_name)(self)
def DEC_REF(self, object_type=None, object_instance=None):
    """Append a C++ <Type>::dec_ref(<instance>); statement.

    object_type defaults to self.http_message_type; object_instance defaults
    to the decamelized form of object_type.
    """
    object_type = self.http_message_type if object_type is None else object_type
    if object_instance is None:
        object_instance = decamelize(object_type)
    # The template is filled from the two locals bound above.
    self.append("%(object_type)s::dec_ref(%(object_instance)s);" % locals())
def __py_method_init(self):
    """Return {'__init__': source} for a generated JSON-RPC client __init__.

    The generated constructor normalizes the API URL (appending
    /jsonrpc/<service endpoint> when missing), moves username:password
    credentials from the URL's netloc into a Basic Authorization header,
    and initializes headers, a logger, and the JSON-RPC id counter.

    NOTE(review): in the generated code, path and query are concatenated
    without a '?' separator when rebuilding the credential-stripped URL --
    confirm intended.
    """
    name = self._py_name()  # currently unused by the template below
    # Endpoint name: decamelized service name minus its trailing word
    # (e.g. user_service -> user).
    service_endpoint_name = decamelize(PyService.py_name(self)).rsplit(
        '_', 1)[0]
    service_qname = PyService.py_qname(self)
    return {
        '__init__': """\
def __init__(self, api_url, headers=None):
    %(service_qname)s.__init__(self)

    if headers is None:
        headers = {}
    else:
        if not isinstance(headers, dict):
            raise TypeError(headers)
        headers = headers.copy()

    api_url = api_url.rstrip('/')
    if not api_url.endswith('/jsonrpc/%(service_endpoint_name)s'):
        api_url += '/jsonrpc/%(service_endpoint_name)s'
    self.__api_url = api_url.rstrip('/')

    parsed_api_url = urlparse(api_url)
    parsed_api_url_netloc = parsed_api_url.netloc.split('@', 1)
    if len(parsed_api_url_netloc) == 2:
        username_password = parsed_api_url_netloc[0].split(':', 1)
        if len(username_password) == 2:
            username, password = username_password
            netloc = parsed_api_url_netloc[1]
            headers['Authorization'] = \\
                'Basic ' + \\
                base64.b64encode(
                    "%%s:%%s" %% (
                        username,
                        password
                    )
                )
            self.__api_url = \\
                parsed_api_url.scheme + '://' + netloc + \\
                parsed_api_url.path + \\
                parsed_api_url.query
            # auth_handler = urllib2.HTTPBasicAuthHandler()
            # auth_handler.add_password(realm='Realm',
            #                           uri=self.__api_url,
            #                           user=username,
            #                           passwd=password)
            # opener = urllib2.build_opener(auth_handler)
            # urllib2.install_opener(opener)

    self.__headers = headers
    self.__logger = logging.getLogger(self.__class__.__module__ + '.' + self.__class__.__name__)
    self.__next_id = 1
""" % locals()
    }
def _py_method_init(self):
    """Return {'__init__': source} for a generated JSON-RPC client __init__.

    Like the generated constructor elsewhere in this generator, but the
    api_url parameter takes a generator-configured default (api_url=None
    plus a fallback assignment) when the parent generator defines a
    non-None _api_url_default.

    NOTE(review): in the generated code, path and query are concatenated
    without a '?' separator when rebuilding the credential-stripped URL --
    confirm intended.
    """
    api_url_default = self._parent_generator()._api_url_default
    if api_url_default is not None:
        api_url_parameter = 'api_url=None'
        set_api_url_default = """
    if api_url is None:
        api_url = %(api_url_default)s""" % locals()
    else:
        api_url_parameter = 'api_url'
        set_api_url_default = ''
    name = self.py_name()  # currently unused by the template below
    # Endpoint name: decamelized service name minus its trailing word.
    service_endpoint_name = decamelize(PyService.py_name(self)).rsplit(
        '_', 1)[0]
    service_qname = PyService.py_qname(self)
    return {
        '__init__': """\
def __init__(self, %(api_url_parameter)s, headers=None):
    %(service_qname)s.__init__(self)%(set_api_url_default)s

    if headers is None:
        headers = {}
    else:
        if not isinstance(headers, dict):
            raise TypeError(headers)
        headers = headers.copy()

    api_url = api_url.rstrip('/')
    if not api_url.endswith('/jsonrpc/%(service_endpoint_name)s'):
        api_url += '/jsonrpc/%(service_endpoint_name)s'
    self.__api_url = api_url.rstrip('/')

    parsed_api_url = urlparse(api_url)
    parsed_api_url_netloc = parsed_api_url.netloc.split('@', 1)
    if len(parsed_api_url_netloc) == 2:
        username_password = parsed_api_url_netloc[0].split(':', 1)
        if len(username_password) == 2:
            username, password = username_password
            netloc = parsed_api_url_netloc[1]
            headers['Authorization'] = \\
                'Basic ' + \\
                base64.b64encode(
                    "%%s:%%s" %% (
                        username,
                        password
                    )
                )
            self.__api_url = \\
                parsed_api_url.scheme + '://' + netloc + \\
                parsed_api_url.path + \\
                parsed_api_url.query

    self.__headers = headers
    self.__next_id = 1
""" % locals()
    }
def __init__(self):
    """Test case: a request whose Host header is present but has an empty
    value parses successfully, and the "Host" field reads back as "".
    """
    HTTPMessageParserTest.__init__(self)
    self.PARSER("GET", " ", URI, " ", HTTP_VERSION, CRLF, "Host:", CRLF, CRLF)
    self.PARSE()
    self.ASSERT_NONNULL()
    self.ASSERT_HTTP_VERSION()
    http_message_instance = decamelize(self.http_message_type)
    self.append("""ASSERT_EQ((*%(http_message_instance)s)["Host"], "");""" % locals())
    self.DEC_REF()
def __py_method_init(self):
    """Return {'__init__': source} for a generated JSON-RPC client __init__.

    The generated constructor normalizes the API URL (appending
    /jsonrpc/<service endpoint> when missing), moves username:password
    credentials from the URL's netloc into a Basic Authorization header,
    and initializes headers, a logger, and the JSON-RPC id counter.

    NOTE(review): in the generated code, path and query are concatenated
    without a '?' separator when rebuilding the credential-stripped URL --
    confirm intended.
    """
    name = self._py_name()  # currently unused by the template below
    # Endpoint name: decamelized service name minus its trailing word.
    service_endpoint_name = decamelize(PyService.py_name(self)).rsplit('_', 1)[0]
    service_qname = PyService.py_qname(self)
    return {'__init__': """\
def __init__(self, api_url, headers=None):
    %(service_qname)s.__init__(self)

    if headers is None:
        headers = {}
    else:
        if not isinstance(headers, dict):
            raise TypeError(headers)
        headers = headers.copy()

    api_url = api_url.rstrip('/')
    if not api_url.endswith('/jsonrpc/%(service_endpoint_name)s'):
        api_url += '/jsonrpc/%(service_endpoint_name)s'
    self.__api_url = api_url.rstrip('/')

    parsed_api_url = urlparse(api_url)
    parsed_api_url_netloc = parsed_api_url.netloc.split('@', 1)
    if len(parsed_api_url_netloc) == 2:
        username_password = parsed_api_url_netloc[0].split(':', 1)
        if len(username_password) == 2:
            username, password = username_password
            netloc = parsed_api_url_netloc[1]
            headers['Authorization'] = \\
                'Basic ' + \\
                base64.b64encode(
                    "%%s:%%s" %% (
                        username,
                        password
                    )
                )
            self.__api_url = \\
                parsed_api_url.scheme + '://' + netloc + \\
                parsed_api_url.path + \\
                parsed_api_url.query
            # auth_handler = urllib2.HTTPBasicAuthHandler()
            # auth_handler.add_password(realm='Realm',
            #                           uri=self.__api_url,
            #                           user=username,
            #                           passwd=password)
            # opener = urllib2.build_opener(auth_handler)
            # urllib2.install_opener(opener)

    self.__headers = headers
    self.__logger = logging.getLogger(self.__class__.__module__ + '.' + self.__class__.__name__)
    self.__next_id = 1
""" % locals()}
def _py_method_init(self):
    """Return {'__init__': source} for a generated JSON-RPC client __init__.

    The api_url parameter takes a generator-configured default (api_url=None
    plus a fallback assignment) when the parent generator defines a non-None
    _api_url_default; otherwise api_url is a required parameter.

    NOTE(review): in the generated code, path and query are concatenated
    without a '?' separator when rebuilding the credential-stripped URL --
    confirm intended.
    """
    api_url_default = self._parent_generator()._api_url_default
    if api_url_default is not None:
        api_url_parameter = 'api_url=None'
        set_api_url_default = """
    if api_url is None:
        api_url = %(api_url_default)s""" % locals()
    else:
        api_url_parameter = 'api_url'
        set_api_url_default = ''
    name = self.py_name()  # currently unused by the template below
    # Endpoint name: decamelized service name minus its trailing word.
    service_endpoint_name = decamelize(PyService.py_name(self)).rsplit('_', 1)[0]
    service_qname = PyService.py_qname(self)
    return {'__init__': """\
def __init__(self, %(api_url_parameter)s, headers=None):
    %(service_qname)s.__init__(self)%(set_api_url_default)s

    if headers is None:
        headers = {}
    else:
        if not isinstance(headers, dict):
            raise TypeError(headers)
        headers = headers.copy()

    api_url = api_url.rstrip('/')
    if not api_url.endswith('/jsonrpc/%(service_endpoint_name)s'):
        api_url += '/jsonrpc/%(service_endpoint_name)s'
    self.__api_url = api_url.rstrip('/')

    parsed_api_url = urlparse(api_url)
    parsed_api_url_netloc = parsed_api_url.netloc.split('@', 1)
    if len(parsed_api_url_netloc) == 2:
        username_password = parsed_api_url_netloc[0].split(':', 1)
        if len(username_password) == 2:
            username, password = username_password
            netloc = parsed_api_url_netloc[1]
            headers['Authorization'] = \\
                'Basic ' + \\
                base64.b64encode(
                    "%%s:%%s" %% (
                        username,
                        password
                    )
                )
            self.__api_url = \\
                parsed_api_url.scheme + '://' + netloc + \\
                parsed_api_url.path + \\
                parsed_api_url.query

    self.__headers = headers
    self.__next_id = 1
""" % locals()}
def ts_from_json(self, value):
    """Generate a TypeScript IIFE expression converting a JSON array into
    this sequence type; struct-typed elements are wrapped in a typed
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Ts<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'ts'
    assert class_name_split[2] == 'type'
    element_from_json = self.element_type.ts_from_json('json[i]')
    qname = self.ts_qname()
    return_value = 'sequence'
    if isinstance(self.element_type, TsStructType):
        return_value = "new Backbone.Collection<%s>(%s)" % (self.element_type.ts_qname(), return_value)
    return_value_type_qname = self.element_type.ts_qname() + '[]'
    return """function(json: any[]): %(qname)s {
    var sequence: %(return_value_type_qname)s = [];
    for (var i = 0; i < json.length; i++) {
        sequence.push(%(element_from_json)s);
    }
    return %(return_value)s;
}(%(value)s)""" % locals()
def ts_to_json(self, value):
    """Generate a TypeScript IIFE expression converting this sequence type
    into a JSON array.

    Struct-typed sequences are Backbone collections, so their .models array
    is what gets passed to the conversion function.
    """
    # Sanity-check that the class name follows the Ts<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'ts'
    assert class_name_split[2] == 'type'
    if isinstance(self.element_type, TsStructType):
        value = value + '.models'
    array_qname = self.element_type.ts_qname() + '[]'
    # Fixed: the literal below has no %-placeholders, so the original
    # "% locals()" on it was a no-op; the unused type_name local is gone.
    element_to_json = self.element_type.ts_to_json("__inArray[__i]")
    return """\
function (__inArray: %(array_qname)s): any[] {
    var __outArray: any[] = [];
    for (var __i = 0; __i < __inArray.length; __i++) {
        __outArray.push(%(element_to_json)s);
    }
    return __outArray;
}(%(value)s)""" % locals()
def __init__(self):
    """Test case: a request whose Host header is present but has an empty
    value parses successfully, and the "Host" field reads back as "".
    """
    HTTPMessageParserTest.__init__(self)
    self.PARSER(
        "GET", ' ', URI, ' ', HTTP_VERSION, CRLF,
        "Host:", CRLF,
        CRLF
    )
    self.PARSE()
    self.ASSERT_NONNULL()
    self.ASSERT_HTTP_VERSION()
    http_message_instance = decamelize(self.http_message_type)
    self.append("""ASSERT_EQ((*%(http_message_instance)s)["Host"], "");""" % locals())
    self.DEC_REF()
def js_to_json(self, value):
    """Generate a JavaScript IIFE expression converting this sequence type
    into a JSON array.

    Model-typed elements live in a Backbone collection, so the collection's
    .models array is appended to the value expression.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    if self.element_type.js_is_model():
        elements_property = '.models'
    else:
        elements_property = ''
    # Fixed: the literal below has no %-placeholders, so the original
    # "% locals()" on it was a no-op; the unused type_name local is gone.
    element_to_json = self.element_type.js_to_json("__inArray[__i]")
    return """\
function (__inArray) {
    var __outArray = new Array();
    for (var __i = 0; __i < __inArray.length; __i++) {
        __outArray.push(%(element_to_json)s);
    }
    return __outArray;
}(%(value)s%(elements_property)s)""" % locals()
def js_from_json(self, value):
    """Generate a JavaScript IIFE expression converting a JSON array into
    this sequence type; compound-typed elements are wrapped in a
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    element_from_json = self.element_type.js_from_json('json[i]')
    if isinstance(self.element_type, _JsCompoundType):
        element_type_qname = self.element_type.js_qname()
        return_value = "new Backbone.Collection(sequence, {model: %(element_type_qname)s})" % locals()
    else:
        return_value = 'sequence'
    type_name = class_name_split[1].capitalize()  # NOTE(review): unused below
    return """function(json) {
    var sequence = new Array();
    for (var i = 0; i < json.length; i++) {
        sequence.push(%(element_from_json)s);
    }
    return %(return_value)s;
}(%(value)s)""" % locals()
def js_read_protocol(self):
    """Generate a JavaScript IIFE expression reading this sequence type from
    a Thrift protocol (iprot); compound-typed elements are wrapped in a
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    element_ttype_id = self.element_type.thrift_ttype_id()  # NOTE(review): unused below
    element_read_protocol = self.element_type.js_read_protocol()
    if isinstance(self.element_type, _JsCompoundType):
        element_type_qname = self.element_type.js_qname()
        return_value = "new Backbone.Collection(sequence, {model: %(element_type_qname)s})" % locals()
    else:
        return_value = 'sequence'
    # type_name selects the readListBegin/readSetBegin/... protocol calls.
    type_name = class_name_split[1].capitalize()
    return """function(iprot) {
    var sequenceBegin = iprot.read%(type_name)sBegin();
    var sequence = new Array();
    for (var i = 0; i < sequenceBegin.size; i++) {
        sequence.push(%(element_read_protocol)s);
    }
    iprot.read%(type_name)sEnd();
    return %(return_value)s;
}(iprot)""" % locals()
def ts_to_json(self, value):
    """Generate a TypeScript IIFE expression converting this sequence type
    into a JSON array.

    Struct-typed sequences are Backbone collections, so their .models array
    is what gets passed to the conversion function.
    """
    # Sanity-check that the class name follows the Ts<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'ts'
    assert class_name_split[2] == 'type'
    if isinstance(self.element_type, TsStructType):
        value = value + '.models'
    array_qname = self.element_type.ts_qname() + '[]'
    # Fixed: the literal below has no %-placeholders, so the original
    # "% locals()" on it was a no-op; the unused type_name local is gone.
    element_to_json = self.element_type.ts_to_json("__inArray[__i]")
    return """\
function (__inArray: %(array_qname)s): any[] {
    var __outArray: any[] = [];
    for (var __i = 0; __i < __inArray.length; __i++) {
        __outArray.push(%(element_to_json)s);
    }
    return __outArray;
}(%(value)s)""" % locals()
def ts_from_json(self, value):
    """Generate a TypeScript IIFE expression converting a JSON array into
    this sequence type; struct-typed elements are wrapped in a typed
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Ts<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'ts'
    assert class_name_split[2] == 'type'
    element_from_json = self.element_type.ts_from_json('json[i]')
    qname = self.ts_qname()
    return_value = 'sequence'
    if isinstance(self.element_type, TsStructType):
        return_value = "new Backbone.Collection<%s>(%s)" % (
            self.element_type.ts_qname(), return_value)
    return_value_type_qname = self.element_type.ts_qname() + '[]'
    return """function(json: any[]): %(qname)s {
    var sequence: %(return_value_type_qname)s = [];
    for (var i = 0; i < json.length; i++) {
        sequence.push(%(element_from_json)s);
    }
    return %(return_value)s;
}(%(value)s)""" % locals(
    )
def js_from_json(self, value):
    """Generate a JavaScript IIFE expression converting a JSON array into
    this sequence type; compound-typed elements are wrapped in a
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    element_from_json = self.element_type.js_from_json('json[i]')
    if isinstance(self.element_type, _JsCompoundType):
        element_type_qname = self.element_type.js_qname()
        return_value = "new Backbone.Collection(sequence, {model: %(element_type_qname)s})" % locals(
        )
    else:
        return_value = 'sequence'
    type_name = class_name_split[1].capitalize()  # NOTE(review): unused below
    return """function(json) {
    var sequence = new Array();
    for (var i = 0; i < json.length; i++) {
        sequence.push(%(element_from_json)s);
    }
    return %(return_value)s;
}(%(value)s)""" % locals(
    )
def js_to_json(self, value):
    """Generate a JavaScript IIFE expression converting this sequence type
    into a JSON array.

    Model-typed elements live in a Backbone collection, so the collection's
    .models array is appended to the value expression.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    if self.element_type.js_is_model():
        elements_property = '.models'
    else:
        elements_property = ''
    # Fixed: the literal below has no %-placeholders, so the original
    # "% locals()" on it was a no-op; the unused type_name local is gone.
    element_to_json = self.element_type.js_to_json("__inArray[__i]")
    return """\
function (__inArray) {
    var __outArray = new Array();
    for (var __i = 0; __i < __inArray.length; __i++) {
        __outArray.push(%(element_to_json)s);
    }
    return __outArray;
}(%(value)s%(elements_property)s)""" % locals()
def js_read_protocol(self):
    """Generate a JavaScript IIFE expression reading this sequence type from
    a Thrift protocol (iprot); compound-typed elements are wrapped in a
    Backbone.Collection.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    element_ttype_id = self.element_type.thrift_ttype_id()  # NOTE(review): unused below
    element_read_protocol = self.element_type.js_read_protocol()
    if isinstance(self.element_type, _JsCompoundType):
        element_type_qname = self.element_type.js_qname()
        return_value = "new Backbone.Collection(sequence, {model: %(element_type_qname)s})" % locals(
        )
    else:
        return_value = 'sequence'
    # type_name selects the readListBegin/readSetBegin/... protocol calls.
    type_name = class_name_split[1].capitalize()
    return """function(iprot) {
    var sequenceBegin = iprot.read%(type_name)sBegin();
    var sequence = new Array();
    for (var i = 0; i < sequenceBegin.size; i++) {
        sequence.push(%(element_read_protocol)s);
    }
    iprot.read%(type_name)sEnd();
    return %(return_value)s;
}(iprot)""" % locals(
    )
def _java_method_load(self):
    """Return {'load': source} for generated Java configuration loaders.

    The generated load() collects properties from environment variables
    (<PROJECT>_<NAME>), a classpath properties file, /etc/<project>/,
    ~/.<project>/, and an optional command-line properties file, merging
    each source with __mergeProperties (precedence is determined by that
    helper, defined elsewhere). Field initializers consume recognized
    properties; any property left over afterwards raises a RuntimeException.
    """
    name = self.java_name()
    field_initializers = indent(' ' * 4, "\n\n".join(field.java_property_initializer() for field in self.fields))
    field_thrift_names = ', '.join('"%s"' % field.name for field in self.fields)
    field_values = ', '.join(field.java_name() for field in self.fields)
    project_name = self._parent_generator()._project_name
    project_name_upper = self._parent_generator()._project_name.upper()
    # <StructName>Properties -> <struct_name>.properties
    properties_file_name = decamelize(self.name)
    if properties_file_name.endswith('_properties'):
        properties_file_name = properties_file_name[:-len('_properties')]
    properties_file_name = properties_file_name + '.properties'
    return {'load': """
public static %(name)s load() {
    return load(com.google.common.base.Optional.<java.io.File> absent());
}

public static %(name)s load(final com.google.common.base.Optional<java.io.File> commandLinePropertiesFilePath) {
    java.util.Properties __properties = new java.util.Properties();
    String[] __propertyNames = {%(field_thrift_names)s};
    for (final String propertyName : __propertyNames) {
        final String propertyValue = System.getenv("%(project_name_upper)s_" + propertyName.toUpperCase());
        if (propertyValue != null) {
            __properties.put(propertyName, propertyValue);
        }
    }
    __properties = __mergeProperties(__properties, __readProperties("%(properties_file_name)s"));
    __properties = __mergeProperties(__properties, __readProperties(new java.io.File(
            "/etc/%(project_name)s/%(properties_file_name)s")));
    __properties = __mergeProperties(__properties, __readProperties(new java.io.File(
            new java.io.File(new java.io.File(System.getProperty("user.home")), ".%(project_name)s"),
            "%(properties_file_name)s")));
    if (commandLinePropertiesFilePath.isPresent()) {
        __properties = __mergeProperties(__properties, __readProperties(commandLinePropertiesFilePath.get()));
    }

%(field_initializers)s

    for (final java.util.Map.Entry<Object, Object> entry : __properties.entrySet()) {
        throw new RuntimeException("properties file(s) have unknown property "
                + entry.getKey().toString());
    }

    return %(name)s.create(%(field_values)s);
}""" % locals()}
def sql_create_table(self):
    """Generate a CREATE TABLE statement for this struct.

    Column definitions come from the struct's fields; foreign-key clauses
    (also field-derived) are appended after all columns. An auto-increment
    id primary key is always emitted first.
    """
    column_definitions = []
    foreign_key_definitions = []
    for field in self.fields:
        column_definition = field.sql_column_definition()
        if column_definition is not None:
            column_definitions.append(column_definition)
    for field in self.fields:
        foreign_key_definition = field.sql_foreign_key_definition()
        if foreign_key_definition is not None:
            foreign_key_definitions.append(foreign_key_definition)
    column_definitions.extend(foreign_key_definitions)
    column_definitions = lpad(",\n    ", ",\n    ".join(column_definitions))
    name = decamelize(self.name)
    return """\
CREATE TABLE IF NOT EXISTS %(name)s(
    id INTEGER PRIMARY KEY AUTO_INCREMENT NOT NULL%(column_definitions)s
)""" % locals()
def py_write_protocol(self, value, depth=0):
    """Generate Python code writing this sequence type to oprot.

    depth disambiguates the generated loop variable (_0, _1, ...) when
    sequence types nest.
    """
    # Sanity-check that the class name follows the Py<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'py'
    assert class_name_split[2] == 'type'
    element_ttype_id = self.element_type.thrift_ttype_id()
    element_write_protocol = \
        indent(' ' * 4,
               self.element_type.py_write_protocol(
                   "_%(depth)u" % locals(),
                   depth=depth + 1
               )
               )
    # type_name selects the write_list_begin/write_set_begin/... calls.
    type_name = class_name_split[1]
    return """\
oprot.write_%(type_name)s_begin(%(element_ttype_id)u, len(%(value)s))
for _%(depth)u in %(value)s:
%(element_write_protocol)s
oprot.write_%(type_name)s_end()""" % locals()
def js_write_protocol(self, value, depth=0):
    """Generate JavaScript code writing this sequence type to oprot.

    depth disambiguates the generated __sequenceN/__iN variables when
    sequence types nest; model-typed sequences are Backbone collections, so
    their .models array is iterated.
    """
    # Sanity-check that the class name follows the Js<Kind>Type convention.
    class_name_split = decamelize(self.__class__.__name__).split('_')
    assert len(class_name_split) == 3
    assert class_name_split[0] == 'js'
    assert class_name_split[2] == 'type'
    element_ttype_id = self.element_type.thrift_ttype_id()
    if self.element_type.js_is_model():
        elements_property = '.models'
    else:
        elements_property = ''
    element_write_protocol = \
        indent(' ' * 4,
               self.element_type.js_write_protocol("__sequence%(depth)u%(elements_property)s[__i%(depth)u]" % locals(), depth=depth + 1))
    # type_name selects the writeListBegin/writeSetBegin/... calls.
    type_name = class_name_split[1].capitalize()
    return """\
var __sequence%(depth)u = %(value)s;
oprot.write%(type_name)sBegin(%(element_ttype_id)u, __sequence%(depth)u%(elements_property)s.length);
for (var __i%(depth)u = 0; __i%(depth)u < __sequence%(depth)u%(elements_property)s.length; __i%(depth)u++) {
%(element_write_protocol)s
}
oprot.write%(type_name)sEnd();""" % locals()
def _save_to_dir(self, out_dir_path):
    """Save this document as a Python module under out_dir_path.

    The module goes into a directory tree derived from the document's Python
    namespace (dots become path separators). When the namespace matches the
    'py' scope namespace and a document root is known, a mismatch between
    the output layout and the .thrift file layout is logged as a warning.
    When the document declares exactly one definition the file is named
    after that definition (decamelized); otherwise after the document.

    Returns whatever _save_to_file returns (the written path).
    """
    root_out_dir_path = out_dir_path
    try:
        py_namespace = self._py_namespace()
        out_dir_path = os.path.join(out_dir_path, py_namespace.replace('.', os.path.sep))
        try:
            if py_namespace == self.namespace_by_scope('py').name and self.document_root_dir_path is not None:
                document_relpath = os.path.relpath(os.path.dirname(self.path), self.document_root_dir_path)
                out_dir_relpath = os.path.relpath(out_dir_path, root_out_dir_path)
                if out_dir_relpath != document_relpath:
                    # warning() rather than the deprecated warn() alias.
                    self._logger.warning("Python module %s (relative directory %s) does not match .thrift file path %s (relative directory %s)", py_namespace, out_dir_relpath, self.path, document_relpath)
        except KeyError:
            # No 'py' scope namespace declared; skip the layout check.
            pass
    except KeyError:
        # Document declares no Python namespace; write directly under
        # out_dir_path.
        pass
    if len(self.definitions) == 1:
        out_file_name = decamelize(self.definitions[0].py_name()) + '.py'
    else:
        out_file_name = self.name + '.py'
    return self._save_to_file(os.path.join(out_dir_path, out_file_name))
def sql_create_table(self):
    """Generate a CREATE TABLE statement for this struct.

    Column definitions come from sql_column annotations and from the
    struct's fields; foreign-key clauses come from sql_foreign_key
    annotations (each of which must immediately follow the sql_column it
    constrains) and from the fields. Foreign-key clauses are emitted after
    all columns; an auto-increment id primary key is always emitted first.

    Raises ValueError when a sql_foreign_key annotation does not directly
    follow a sql_column annotation.
    """
    column_definitions = []
    foreign_key_definitions = []
    for annotation_i, annotation in enumerate(self.annotations):
        if annotation.name == "sql_column":
            column_definitions.append(annotation.value)
        elif annotation.name == "sql_foreign_key":
            if annotation_i == 0:
                raise ValueError("sql_foreign_key annotation on a struct must follow a sql_column annotation")
            elif self.annotations[annotation_i - 1].name != "sql_column":
                raise ValueError(
                    "sql_foreign_key annotation on a struct must follow a sql_column annotation, not "
                    + self.annotations[annotation_i - 1].name
                )
            # The referencing column name is the first token of the
            # preceding sql_column annotation's value.
            foreign_key_definitions.append(
                SqlField.sql_foreign_key_definition_static(
                    column_name=self.annotations[annotation_i - 1].value.split(" ", 1)[0],
                    foreign_table_name=annotation.value[0],
                    foreign_column_name=annotation.value[1],
                )
            )
    for field in self.fields:
        column_definition = field.sql_column_definition()
        if column_definition is not None:
            column_definitions.append(column_definition)
    for field in self.fields:
        foreign_key_definition = field.sql_foreign_key_definition()
        if foreign_key_definition is not None:
            foreign_key_definitions.append(foreign_key_definition)
    column_definitions.extend(foreign_key_definitions)
    column_definitions = lpad(",\n    ", ",\n    ".join(column_definitions))
    name = decamelize(self.name)
    return (
        """\
CREATE TABLE IF NOT EXISTS %(name)s(
    id INTEGER PRIMARY KEY AUTO_INCREMENT NOT NULL%(column_definitions)s
)"""
        % locals()
    )
def __java_markers(self):
    """Generate a Java "Markers" inner class exposing slf4j markers.

    Emits one marker constant per service function plus one for the service
    itself; a static initializer adds every function marker to the service
    marker.
    """
    # The marker's Java variable name and its runtime marker name are both
    # the decamelized, upper-cased service name.
    service_marker_variable_name = service_marker_name = decamelize(self.name).upper()
    add_function_markers = []
    function_markers = []
    for function in self.functions:
        function_marker_variable_name = function.java_marker_variable_name()
        function_markers.append(
            "public final static org.slf4j.Marker %s = org.slf4j.MarkerFactory.getMarker(\"%s\");" % (
                function_marker_variable_name,
                function.java_marker_name()
            ))
        add_function_markers.append("%(service_marker_variable_name)s.add(%(function_marker_variable_name)s);" % locals())
    add_function_markers = "\n".join(indent(' ' * 8, add_function_markers))
    function_markers = "\n".join(indent(' ' * 4, function_markers))
    return """\
public static class Markers {
%(function_markers)s

    public final static org.slf4j.Marker %(service_marker_variable_name)s = org.slf4j.MarkerFactory.getMarker("%(service_marker_name)s");

    static {
%(add_function_markers)s
    }
}""" % locals()
def _compile_thrift_file(self, thrift_file_path, document_root_dir_path=None, generator=None, out=None):
    """Compile one .thrift file with the given (or a default) generator.

    Skips generation and returns None when a generator-name filter
    (self.__gen) is set and this generator's decamelized name is not in it.
    On a scan/compile error the first failure enables DEBUG logging and
    retries once (unless self.__debug is set, which re-raises immediately).
    Saves the compiled document to `out` when given, or lints it when the
    generator is a LintGenerator. Returns the compiled document.
    """
    try:
        # Up to two attempts; the second only after enabling DEBUG logging.
        for i in xrange(2):
            if generator is not None:
                # XyzGenerator -> xyz, for matching against the filter.
                gen = generator.__class__.__name__
                gen = gen[:gen.index('Generator')]
                gen = decamelize(gen)
                if len(self.__gen) > 0 and gen not in self.__gen:
                    return
            else:
                generator = Generator()
            try:
                document = \
                    self.__compiler.compile(
                        document_root_dir_path=document_root_dir_path,
                        generator=generator,
                        thrift_file_path=thrift_file_path,
                    )
            except (ScanException, CompileException):
                if self.__debug:
                    raise
                if i == 0:
                    logging.basicConfig(level=logging.DEBUG)
                    continue  # Try again with debugging on
                else:
                    raise
            if out is not None:
                document.save(out)
            elif isinstance(generator, LintGenerator):
                document.lint()
            return document
    except:
        # Tag the failure with the offending file before re-raising.
        logging.error("exception compiling %s", thrift_file_path)
        raise
def ASSERT_BODY_NULL(self, http_message_instance=None):
    """Append a C++ assertion that the message's body pointer is NULL.

    http_message_instance defaults to the decamelized message type name.
    """
    if http_message_instance is None:
        http_message_instance = decamelize(self.http_message_type)
    line = "ASSERT_EQ(%(http_message_instance)s->get_body(), static_cast<Object*>(NULL));" % locals()
    self.append(line)
def ASSERT_BODY2(self, http_message_instance=None):
    """Append a C++ assertion that the message body is a 2-byte Buffer.

    http_message_instance defaults to the decamelized message type name.
    """
    if http_message_instance is None:
        http_message_instance = decamelize(self.http_message_type)
    line = "ASSERT_EQ(static_cast<Buffer*>(%(http_message_instance)s->get_body())->size(), 2u);" % locals()
    self.append(line)
def ASSERT_HTTP_VERSION(self, http_message_instance=None, http_version="1"):
    """Append a C++ assertion on the parsed message's HTTP version.

    http_message_instance defaults to the decamelized message type name;
    http_version is the expected value expression (default "1").
    """
    http_message_instance = (decamelize(self.http_message_type)
                             if http_message_instance is None
                             else http_message_instance)
    self.append("ASSERT_EQ(%(http_message_instance)s->get_http_version(), %(http_version)s);" % locals())
def ASSERT_HOST_FIELD(self, http_message_instance=None):
    """Append a C++ assertion that the parsed message's "Host" field equals
    the module-level HOST constant.
    """
    if http_message_instance is None:
        http_message_instance = decamelize(self.http_message_type)
    # Pull the module-level HOST into locals() so the template can see it.
    HOST = globals()["HOST"]
    self.append("""ASSERT_EQ((*%(http_message_instance)s)["Host"], "%(HOST)s");""" % locals())
def _java_interface_simple_name(self):
    """Derive the Java interface simple name from this class's name.

    Java<Kind>Type -> "<Kind>" (capitalized); the naming convention is
    validated by assertions.
    """
    parts = decamelize(self.__class__.__name__).split("_")
    assert len(parts) == 3
    assert parts[0] == "java"
    assert parts[2] == "type"
    return parts[1].capitalize()
def PARSER(self, *args):
    """Append a C++ declaration of an HTTP message parser over the
    concatenation of args (the raw message text).

    The parser variable name matches the one consumed by PARSE().
    """
    args = "".join(args)
    http_message_type = self.http_message_type
    http_message_type_lower_case = decamelize(self.http_message_type)
    self.append("""%(http_message_type)sParser %(http_message_type_lower_case)s_parser(\"%(args)s\");""" % locals())
def accept(self, visitor):
    """Dispatch to visitor.visit_<decamelized class name>(self)."""
    visit_method = getattr(visitor,
                           'visit_' + decamelize(self.__class__.__name__))
    return visit_method(self)
def _java_interface_simple_name(self):
    """Java<Kind>Type -> "<Kind>": the Java interface's simple name.

    The class-name convention is validated by assertions.
    """
    parts = decamelize(self.__class__.__name__).split('_')
    assert len(parts) == 3
    assert parts[0] == 'java'
    assert parts[2] == 'type'
    return parts[1].capitalize()
def ts_repr(self):
    """Generate the TypeScript source for this service function: an async
    method (<name>Async, callback-based) and a sync method (<name>Sync),
    both issuing a JSON-RPC 2.0 POST via $.ajax.

    Parameters are marshalled into a __jsonrpc_params object (keyed by
    "<id>:<name>" when the parameter has an id); optional parameters are
    guarded by a typeof-undefined check. The return value, when present, is
    decoded with the return type's ts_from_json.

    NOTE(review): the typeof-undefined guard template below is built without
    "% locals()", so its %(parameter_ts_name)s / %(parameter_to_json)s
    placeholders appear to be left uninterpolated for optional parameters --
    confirm whether this is a latent bug (no behavior changed here).

    The 'error' and 'success' parameter names are reserved for the generated
    callbacks, hence the assertions below.
    """
    for parameter in self.parameters:
        assert parameter.name != 'error', self.parent.name
        assert parameter.name != 'success', self.parent.name

    name = self.name
    ts_name = self.ts_name()
    parameters = [parameter.ts_parameter() for parameter in self.parameters]

    async_call_setup = []
    sync_call_setup = []
    if len(self.parameters) > 0:
        parameters_to_json = []
        for parameter in self.parameters:
            parameter_json_name = parameter.name
            parameter_ts_name = parameter.ts_name()
            if parameter.id is not None:
                parameter_json_name = str(parameter.id) + ':' + parameter_json_name
            parameter_to_json = """__jsonrpc_params["%(parameter_json_name)s"] = """ % locals() + parameter.type.ts_to_json("kwds.%(parameter_ts_name)s" % locals()) + ';'
            if not parameter.required:
                parameter_to_json = indent(' ' * 4, parameter_to_json)
                parameter_to_json = """\
if (typeof kwds.%(parameter_ts_name)s !== "undefined") {
%(parameter_to_json)s
}"""
            parameters_to_json.append(parameter_to_json)
        parameters_to_json = "\n".join(parameters_to_json)
        jsonrpc_params_setup = """\
var __jsonrpc_params: {[index: string]: any} = {};
%(parameters_to_json)s
""" % locals()
        async_call_setup.append(jsonrpc_params_setup)
        sync_call_setup.append(jsonrpc_params_setup)
        jsonrpc_params = '__jsonrpc_params'
    else:
        jsonrpc_params = '{}'

    async_parameters = list(parameters)
    async_parameters.append('error: (jqXHR: JQueryXHR, textStatus: string, errorThrown: string) => any')
    if self.return_field is not None:
        return_field_parameter = self.return_field.ts_parameter()
        async_parameters.append('success: (%(return_field_parameter)s) => void' % locals())
        return_value = self.return_field.type.ts_from_json('__response.result')
        sync_return_type_qname = self.return_field.type.ts_qname()
        sync_call_setup.append("var returnValue: %(sync_return_type_qname)s = null;" % locals())
        sync_return = "\n\n    return returnValue;"
        sync_return_value_assignment = indent(' ' * 12, """\
if (typeof __response.result !== "undefined") {
    returnValue =
%(return_value)s;
} else {
    throw new Error(__response.error);
}""" % locals())
    else:
        async_parameters.append('success: () => void')
        return_value = ''
        sync_return = ''
        sync_return_type_qname = 'void'
        sync_return_value_assignment = indent(' ' * 12, """\
if (typeof __response.result === "undefined") {
    throw new Error(__response.error);
}""" % locals())

    async_call_setup = pad("\n", indent(' ' * 4, "\n".join(async_call_setup)), "\n")
    async_parameters = "kwds: {%s}" % ', '.join(async_parameters)
    sync_call_setup = pad("\n", indent(' ' * 4, "\n".join(sync_call_setup)), "\n")
    sync_parameters = "kwds: {%s}" % ', '.join(parameters) if len(parameters) > 0 else ''

    # Endpoint URL: decamelized service name, minus a trailing "_service".
    jsonrpc_url = '\'/api/jsonrpc/'
    if self.parent.name.endswith('Service'):
        jsonrpc_url += '_'.join(decamelize(self.parent.name).split('_')[:-1])
    else:
        jsonrpc_url += decamelize(self.parent.name)
    jsonrpc_url += '\''

    return """\
%(ts_name)sAsync(%(async_parameters)s): void {%(async_call_setup)s
    $.ajax({
        async:true,
        data:JSON.stringify({
            jsonrpc:'2.0',
            method:'%(name)s',
            params:%(jsonrpc_params)s,
            id:'1234'
        }),
        dataType:'json',
        error:function(jqXHR, textStatus, errorThrown) {
            kwds.error(jqXHR, textStatus, errorThrown);
        },
        mimeType:'application/json',
        type:'POST',
        success:function(__response) {
            if (typeof __response.result !== "undefined") {
                kwds.success(%(return_value)s);
            } else {
                kwds.error(null, __response.error.message, null);
            }
        },
        url:%(jsonrpc_url)s,
    });
}

%(ts_name)sSync(%(sync_parameters)s): %(sync_return_type_qname)s {%(sync_call_setup)s
    $.ajax({
        async:false,
        data:JSON.stringify({
            jsonrpc:'2.0',
            method:'%(name)s',
            params:%(jsonrpc_params)s,
            id:'1234'
        }),
        dataType:'json',
        error:function(jqXHR, textStatus, errorThrown) {
            throw new Error(errorThrown);
        },
        mimeType:'application/json',
        type:'POST',
        success:function(__response) {
%(sync_return_value_assignment)s
        },
        url:%(jsonrpc_url)s,
    });%(sync_return)s
}""" % locals()
def ASSERT_NONNULL(self, object_instance=None):
    """Append a C++ assertion that object_instance is a non-NULL Object*.

    object_instance defaults to the decamelized message type name.
    """
    object_instance = (decamelize(self.http_message_type)
                       if object_instance is None else object_instance)
    self.append("ASSERT_NE(%(object_instance)s, static_cast<Object*>(NULL));" % locals())