def _get_namespace_route_imports(self, namespace, include_route_args=True, include_route_deep_args=False):
    """Returns the user-defined data types referenced by the routes of `namespace`.

    Args:
        namespace: Namespace whose routes are inspected.
        include_route_args: If True, collect types from every route's
            argument data type.
        include_route_deep_args: If True (and include_route_args is False),
            collect union/list argument types directly and, for struct
            arguments, the composite types of their fields.

    Returns:
        list: User-defined data types referenced by the routes (duplicates
        are not removed here).
    """
    result = []

    def _unpack_and_store_data_type(data_type):
        data_type, _ = unwrap_nullable(data_type)
        if is_list_type(data_type):
            while is_list_type(data_type):
                # Fix: unwrap nullable wrappers at every level of list
                # nesting; the previous `data_type = data_type.data_type`
                # left nullable inner lists / nullable user-defined element
                # types wrapped, so they were never recorded.
                data_type, _ = unwrap_nullable(data_type.data_type)
        if not is_void_type(data_type) and is_user_defined_type(data_type):
            result.append(data_type)

    for route in namespace.routes:
        if include_route_args:
            data_type, _ = unwrap_nullable(route.arg_data_type)
            _unpack_and_store_data_type(data_type)
        elif include_route_deep_args:
            data_type, _ = unwrap_nullable(route.arg_data_type)
            if is_union_type(data_type) or is_list_type(data_type):
                _unpack_and_store_data_type(data_type)
            elif not is_void_type(data_type):
                # Struct argument: record the composite types of its fields.
                for field in data_type.all_fields:
                    data_type, _ = unwrap_nullable(field.data_type)
                    if (is_struct_type(data_type) or is_union_type(data_type)
                            or is_list_type(data_type)):
                        _unpack_and_store_data_type(data_type)

        _unpack_and_store_data_type(route.result_data_type)
        _unpack_and_store_data_type(route.error_data_type)

    return result
def _get_namespace_route_imports(self, namespace, include_route_args=True, include_route_deep_args=False):
    """Collects the user-defined data types that the routes of `namespace`
    reference, for use in generating import statements.

    `include_route_args` records every route's argument type;
    `include_route_deep_args` (when the former is False) records union/list
    arguments directly and, for struct arguments, the composite types of
    their fields. Result and error types are always recorded.
    """
    referenced = []

    def _record(dt):
        # Strip nullable and arbitrarily nested (possibly nullable) lists,
        # then keep the underlying type if it is user-defined.
        dt, _ = unwrap_nullable(dt)
        while is_list_type(dt):
            dt, _ = unwrap_nullable(dt.data_type)
        if is_user_defined_type(dt) and not is_void_type(dt):
            referenced.append(dt)

    for route in namespace.routes:
        arg_dt, _ = unwrap_nullable(route.arg_data_type)
        if include_route_args:
            _record(arg_dt)
        elif include_route_deep_args:
            if is_union_type(arg_dt) or is_list_type(arg_dt):
                _record(arg_dt)
            elif not is_void_type(arg_dt):
                for field in arg_dt.all_fields:
                    field_dt, _ = unwrap_nullable(field.data_type)
                    if (is_struct_type(field_dt) or is_union_type(field_dt)
                            or is_list_type(field_dt)):
                        _record(field_dt)

        _record(route.result_data_type)
        _record(route.error_data_type)

    return referenced
def fmt_type(data_type, tag=False, has_default=False, no_ptr=False, is_prop=False):
    """Formats a Stone data type as an Objective-C type string.

    `no_ptr` suppresses the trailing ' *'; `tag` prepends 'nullable ' for
    nullable/defaulted types except when emitting a property (`is_prop`).
    """
    data_type, nullable = unwrap_nullable(data_type)

    if is_user_defined_type(data_type):
        fmt = '{}' if no_ptr else '{} *'
        result = fmt.format(fmt_class_prefix(data_type))
    else:
        result = _primitive_table.get(data_type.__class__,
                                      fmt_class(data_type.name))
        if is_list_type(data_type):
            item_dt, _ = unwrap_nullable(data_type.data_type)
            fmt = '<{}>' if no_ptr else '<{}> *'
            result += fmt.format(fmt_type(item_dt))
        elif is_map_type(data_type):
            value_dt, _ = unwrap_nullable(data_type.value_data_type)
            fmt = '<NSString *, {}>' if no_ptr else '<NSString *, {}> *'
            result += fmt.format(fmt_type(value_dt))

    if tag and (nullable or has_default) and not is_prop:
        result = 'nullable ' + result

    return result
def _unpack_and_store_data_type(data_type):
    """Strips nullable and nested list wrappers from `data_type` and, if the
    underlying type is user-defined, appends it to `result`.

    Note: `result` is a free variable supplied by the enclosing scope.
    """
    data_type, _ = unwrap_nullable(data_type)
    # Drill through arbitrarily nested, possibly nullable, lists.
    while is_list_type(data_type):
        data_type, _ = unwrap_nullable(data_type.data_type)
    if is_user_defined_type(data_type) and not is_void_type(data_type):
        result.append(data_type)
def _get_imports_h(self, data_types):
    """Emits all necessary header file imports for the given Stone data type.

    Accepts one data type or a list; returns a sorted, de-duplicated list of
    class names to import.
    """
    if not isinstance(data_types, list):
        data_types = [data_types]

    import_classes = []
    for data_type in data_types:
        if is_user_defined_type(data_type):
            import_classes.append(fmt_class_prefix(data_type))
            for field in data_type.all_fields:
                field_dt, _ = unwrap_nullable(field.data_type)
                # unpack list or map
                while is_list_type(field_dt) or is_map_type(field_dt):
                    field_dt = (field_dt.value_data_type
                                if is_map_type(field_dt) else field_dt.data_type)
                if is_user_defined_type(field_dt):
                    import_classes.append(fmt_class_prefix(field_dt))

    return sorted(set(import_classes))
def _get_route_args(self, namespace, route, tag=False):  # pylint: disable=unused-argument
    """Returns a list of name / value string pairs representing the arguments for
    a particular route, plus the matching documentation pairs."""
    data_type, _ = unwrap_nullable(route.arg_data_type)

    if is_struct_type(data_type):
        # One argument per struct field.
        arg_list = [
            (fmt_var(field.name),
             fmt_type(field.data_type, tag=tag, has_default=field.has_default))
            for field in data_type.all_fields
        ]
        doc_list = [(fmt_var(f.name), self.process_doc(f.doc, self._docf))
                    for f in data_type.fields if f.doc]
        return arg_list, doc_list

    if is_union_type(data_type):
        # Unions are passed as a single argument.
        union_arg = fmt_var(data_type.name)
        if data_type.doc:
            union_doc = self.process_doc(data_type.doc, self._docf)
        else:
            union_doc = 'The {} union'.format(fmt_class(data_type.name))
        return ([(union_arg, fmt_type(route.arg_data_type, tag=tag))],
                [(union_arg, union_doc)])

    return [], []
def _get_default_route_args(self, namespace, route, tag=False):  # pylint: disable=unused-argument
    """Returns a list of name / value string pairs representing the default
    arguments for a particular route (fields with no default and non-nullable
    type), plus the matching documentation pairs."""
    data_type, _ = unwrap_nullable(route.arg_data_type)
    if not is_struct_type(data_type):
        return [], []

    def _is_required(f):
        # A field must be supplied when it has no default and is not nullable.
        return not f.has_default and not is_nullable_type(f.data_type)

    arg_list = [(fmt_var(f.name), fmt_type(f.data_type, tag=tag))
                for f in data_type.all_fields if _is_required(f)]
    doc_list = [(fmt_var(f.name), self.process_doc(f.doc, self._docf))
                for f in data_type.fields if f.doc and _is_required(f)]
    return arg_list, doc_list
def _get_imports_m(self, data_types, default_imports):
    """Emits all necessary implementation file imports for the given Stone data type.

    Args:
        data_types: One user-defined data type or a list of them.
        default_imports: Baseline class names always imported.

    Returns:
        list: Sorted, de-duplicated class names to import.
    """
    if not isinstance(data_types, list):
        data_types = [data_types]

    # Fix: copy instead of aliasing — the original appended directly to the
    # caller's `default_imports` list, mutating it across calls.
    import_classes = list(default_imports)

    for data_type in data_types:
        import_classes.append(fmt_class_prefix(data_type))
        if data_type.parent_type:
            import_classes.append(fmt_class_prefix(data_type.parent_type))
        if is_struct_type(data_type) and data_type.has_enumerated_subtypes():
            for _, subtype in data_type.get_all_subtypes_with_tags():
                import_classes.append(fmt_class_prefix(subtype))
        for field in data_type.all_fields:
            field_dt, _ = unwrap_nullable(field.data_type)
            # unpack list or map
            while is_list_type(field_dt) or is_map_type(field_dt):
                field_dt = (field_dt.value_data_type
                            if is_map_type(field_dt) else field_dt.data_type)
            if is_user_defined_type(field_dt):
                import_classes.append(fmt_class_prefix(field_dt))

    if import_classes:
        import_classes = sorted(set(import_classes))
    return import_classes
def _needs_base_type(data_type):
    """Whether `data_type` (after unwrapping nullable/lists) is a struct with
    enumerated subtypes, which requires the generated interface base type."""
    data_type, _ = unwrap_nullable(data_type)
    if is_list_type(data_type):
        # Recurse into the element type.
        return _needs_base_type(data_type.data_type)
    return is_struct_type(data_type) and data_type.has_enumerated_subtypes()
def generate_validator_constructor(ns, data_type):
    """
    Given a Stone data type, returns a string that can be used to construct
    the appropriate validation object in Python.
    """
    dt, nullable_dt = unwrap_nullable(data_type)

    if is_list_type(dt):
        # List validators recurse into the item type.
        validator = generate_func_call(
            'bv.List',
            args=[generate_validator_constructor(ns, dt.data_type)],
            kwargs=[('min_items', dt.min_items), ('max_items', dt.max_items)],
        )
    elif is_map_type(dt):
        validator = generate_func_call(
            'bv.Map',
            args=[
                generate_validator_constructor(ns, dt.key_data_type),
                generate_validator_constructor(ns, dt.value_data_type),
            ])
    elif is_numeric_type(dt):
        validator = generate_func_call(
            'bv.{}'.format(dt.name),
            kwargs=[('min_value', dt.min_value), ('max_value', dt.max_value)],
        )
    elif is_string_type(dt):
        pattern = repr(dt.pattern) if dt.pattern is not None else None
        validator = generate_func_call(
            'bv.String',
            kwargs=[('min_length', dt.min_length),
                    ('max_length', dt.max_length),
                    ('pattern', pattern)],
        )
    elif is_timestamp_type(dt):
        validator = generate_func_call(
            'bv.Timestamp',
            args=[repr(dt.format)],
        )
    elif is_user_defined_type(dt) or is_alias(dt):
        # User-defined types and aliases both refer to a validator declared
        # elsewhere; qualify it when it lives in another namespace.
        validator = fmt_class(dt.name) + '_validator'
        if ns.name != dt.namespace.name:
            validator = '{}.{}'.format(dt.namespace.name, validator)
    elif is_boolean_type(dt) or is_bytes_type(dt) or is_void_type(dt):
        validator = generate_func_call('bv.{}'.format(dt.name))
    else:
        raise AssertionError('Unsupported data type: %r' % dt)

    if nullable_dt:
        return generate_func_call('bv.Nullable', args=[validator])
    return validator
def fmt_serial_obj(data_type):
    """Returns the Objective-C serializer name for the given Stone data type."""
    unwrapped, _ = unwrap_nullable(data_type)
    if is_user_defined_type(unwrapped):
        return fmt_serial_class(fmt_class_prefix(unwrapped))
    # Primitive types map through the serial table; unknown classes fall back
    # to the formatted type name.
    return _serial_table.get(unwrapped.__class__, fmt_class(unwrapped.name))
def fmt_class_type(data_type, suppress_ptr=False):
    """Formats a Stone data type as an Objective-C class type (no trailing
    ' *' for user-defined types; `suppress_ptr` strips '*' from primitives)."""
    data_type, _ = unwrap_nullable(data_type)

    if is_user_defined_type(data_type):
        return '{}'.format(fmt_class_prefix(data_type))

    result = _primitive_table.get(data_type.__class__,
                                  fmt_class(data_type.name))
    if suppress_ptr:
        result = result.replace(' *', '').replace('*', '')

    if is_list_type(data_type):
        elem_dt, _ = unwrap_nullable(data_type.data_type)
        result += '<{}>'.format(fmt_type(elem_dt))
    elif is_map_type(data_type):
        value_dt, _ = unwrap_nullable(data_type.value_data_type)
        result += '<NSString *, {}>'.format(fmt_type(value_dt))
    return result
def fmt_property(field):
    """Builds the Objective-C @property declaration string for `field`."""
    data_type, nullable = unwrap_nullable(field.data_type)
    attrs = ['nonatomic', 'readonly']
    if is_string_type(data_type):
        # NSString-backed properties are conventionally copied.
        attrs.append('copy')
    if nullable:
        attrs.append('nullable')
    return '@property ({}) {}{};'.format(
        ', '.join(attrs),
        fmt_type(field.data_type, tag=True, is_prop=True),
        fmt_var(field.name))
def fmt_property(field):
    """Formats a struct/union field as an Objective-C @property declaration."""
    unwrapped, nullable = unwrap_nullable(field.data_type)
    qualifiers = ['nonatomic', 'readonly']
    qualifiers += ['copy'] if is_string_type(unwrapped) else []
    qualifiers += ['nullable'] if nullable else []
    prop_type = fmt_type(field.data_type, tag=True, is_prop=True)
    declaration = '@property ({}) {}{};'
    return declaration.format(', '.join(qualifiers), prop_type, fmt_var(field.name))
def fmt_type(data_type):
    """Formats a Stone data type as a Swift type string, appending '?' when
    the type is nullable."""
    data_type, nullable = unwrap_nullable(data_type)

    if is_user_defined_type(data_type):
        result = '{}.{}'.format(fmt_class(data_type.namespace.name),
                                fmt_class(data_type.name))
    else:
        result = _type_table.get(data_type.__class__, fmt_class(data_type.name))
        if is_list_type(data_type):
            result += '<{}>'.format(fmt_type(data_type.data_type))

    return (result + '?') if nullable else result
def make_test_field(field_name, stone_type, rust_generator, reference_impls):
    """Builds a TestField carrying a representative concrete value for one field.

    Recurses into composite types (structs, unions, lists, maps) so that
    serialization round-trip tests can be generated.

    Args:
        field_name: Stone name of the field, or None for a top-level value.
        stone_type: The field's Stone IR data type (possibly nullable).
        rust_generator: Backend used for Rust identifier formatting.
        reference_impls: Reference implementations used to construct inner
            test values.

    Returns:
        TestField: (rust name, chosen value, inner Test* helper or None,
        unwrapped type, whether the type was nullable).

    Raises:
        RuntimeError: If the field's type is not handled.
    """
    rust_name = rust_generator.field_name_raw(
        field_name) if field_name is not None else None
    typ, option = ir.unwrap_nullable(stone_type)
    inner = None
    value = None
    if ir.is_struct_type(typ):
        if typ.has_enumerated_subtypes():
            # Polymorphic struct: instantiate the first enumerated subtype.
            variant = typ.get_enumerated_subtypes()[0]
            inner = TestPolymorphicStruct(
                rust_generator, typ, reference_impls, variant)
        else:
            inner = TestStruct(rust_generator, typ, reference_impls)
        value = inner.value
    elif ir.is_union_type(typ):
        # Pick the first tag.
        # We could generate tests for them all, but it would lead to a huge explosion of tests, and
        # the types themselves are tested elsewhere.
        if len(typ.fields) == 0:
            # there must be a parent type; go for it
            variant = typ.all_fields[0]
        else:
            variant = typ.fields[0]
        inner = TestUnion(rust_generator, typ, reference_impls, variant)
        value = inner.value
    elif ir.is_list_type(typ):
        inner = TestList(rust_generator, typ.data_type, reference_impls)
        value = [inner.value]
    elif ir.is_map_type(typ):
        inner = TestMap(rust_generator, typ, reference_impls)
        value = inner.value
    elif ir.is_string_type(typ):
        if typ.pattern:
            # Synthesize a string that satisfies the pattern validator.
            value = Unregex(typ.pattern, typ.min_length).generate()
        elif typ.min_length:
            value = 'a' * typ.min_length
        else:
            value = 'something'
    elif ir.is_numeric_type(typ):
        # NOTE(review): falls back to 1e307 when no bound is set; `max_value`
        # vs `maximum` appears to vary across numeric IR types — confirm.
        value = typ.max_value or typ.maximum or 1e307
    elif ir.is_boolean_type(typ):
        value = True
    elif ir.is_timestamp_type(typ):
        # A large but representable timestamp (2**33 - 1 seconds since epoch).
        value = datetime.datetime.utcfromtimestamp(2**33 - 1)
    elif ir.is_bytes_type(typ):
        value = bytes([0, 1, 2, 3, 4, 5])
    elif not ir.is_void_type(typ):
        raise RuntimeError(u'Error: unhandled field type of {}: {}'.format(
            field_name, typ))
    return TestField(rust_name, value, inner, typ, option)
def fmt_route_type(data_type, tag=False, has_default=False):
    """Formats a Stone data type as an Objective-C type string for route
    signatures, prepending 'nullable ' for nullable/defaulted user-defined
    types when `tag` is set."""
    data_type, nullable = unwrap_nullable(data_type)

    if is_user_defined_type(data_type):
        result = '{} *'.format(fmt_class_prefix(data_type))
    else:
        result = _primitive_table_user_interface.get(data_type.__class__,
                                                     fmt_class(data_type.name))
        if is_list_type(data_type):
            data_type, _ = unwrap_nullable(data_type.data_type)
            result = result + '<{}> *'.format(fmt_type(data_type))
        elif is_map_type(data_type):
            data_type, _ = unwrap_nullable(data_type.value_data_type)
            result = result + '<NSString *, {}>'.format(fmt_type(data_type))

    if is_user_defined_type(data_type) and tag:
        if nullable or has_default:
            result = 'nullable ' + result
    # Fix: removed the dead `elif not is_void_type(data_type): result += ''`
    # branch — appending the empty string was a no-op in every case.

    return result
def fmt_type(data_type, namespace=None, use_interface=False):
    """Formats a Stone data type as a Go type string, qualifying composite
    types from other namespaces and using the 'Is<Name>' interface form when
    `use_interface` is set and the type needs a base type."""
    data_type, _ = unwrap_nullable(data_type)

    if is_list_type(data_type):
        return '[]%s' % fmt_type(data_type.data_type, namespace, use_interface)

    type_name = data_type.name
    needs_base = use_interface and _needs_base_type(data_type)
    if needs_base:
        type_name = 'Is' + type_name
    if (is_composite_type(data_type) and namespace is not None
            and namespace.name != data_type.namespace.name):
        type_name = data_type.namespace.name + '.' + type_name

    # Interfaces are value types in Go; everything else composite is a pointer.
    fallback = type_name if needs_base else '*' + type_name
    return _type_table.get(data_type.__class__, fallback)
def _struct_init_args(self, data_type, namespace=None):  # pylint: disable=unused-argument
    """Returns (name, typed-value) pairs for a struct's initializer arguments,
    appending ' = <default>' for defaulted fields and ' = nil' for nullable ones."""
    args = []
    for field in data_type.all_fields:
        name = fmt_var(field.name)
        value = fmt_type(field.data_type)
        field_dt, nullable = unwrap_nullable(field.data_type)
        if field.has_default:
            if is_union_type(field_dt):
                # Union defaults render as the tag's enum case.
                default = '.{}'.format(fmt_var(field.default.tag_name))
            else:
                default = fmt_obj(field.default)
            value += ' = {}'.format(default)
        elif nullable:
            value += ' = nil'
        args.append((name, value))
    return args
def fmt_serial_obj(data_type):
    """Returns the Swift expression that constructs the serializer for the
    given Stone data type, wrapping it in NullableSerializer when nullable."""
    data_type, nullable = unwrap_nullable(data_type)

    if is_user_defined_type(data_type):
        result = '{}.{}Serializer()'.format(fmt_class(data_type.namespace.name),
                                            fmt_class(data_type.name))
    else:
        result = _serial_type_table.get(data_type.__class__,
                                        fmt_class(data_type.name))
        if is_list_type(data_type):
            # Array serializers take the element serializer as an argument.
            result += '({})'.format(fmt_serial_obj(data_type.data_type))
        elif is_timestamp_type(data_type):
            result += '("{}")'.format(data_type.format)
        else:
            result = 'Serialization._{}'.format(result)

    return 'NullableSerializer({})'.format(result) if nullable else result
def fmt_type(data_type, namespace=None, use_interface=False, raw=False):
    """Formats a Stone data type as a Go type string. With `raw`, primitive
    payloads are deferred as json.RawMessage for later decoding."""
    data_type, _ = unwrap_nullable(data_type)

    if is_list_type(data_type):
        if raw and is_primitive_type(data_type.data_type):
            return "json.RawMessage"
        return '[]%s' % fmt_type(data_type.data_type, namespace, use_interface, raw)

    if raw:
        return "json.RawMessage"

    type_name = data_type.name
    needs_base = use_interface and _needs_base_type(data_type)
    if needs_base:
        type_name = 'Is' + type_name
    if (is_composite_type(data_type) and namespace is not None
            and namespace.name != data_type.namespace.name):
        type_name = data_type.namespace.name + '.' + type_name

    # Interface types are values; other composites are pointers.
    fallback = type_name if needs_base else '*' + type_name
    return _type_table.get(data_type.__class__, fallback)
def fmt_serial_obj(data_type):
    """Returns the Swift source expression that constructs the serializer for
    the given Stone data type.

    User-defined types get their generated '<Ns>.<Type>Serializer()'; lists
    recurse into the element serializer; timestamps embed their format; other
    primitives are reached through the shared 'Serialization._*' instances.
    Nullable types are wrapped in NullableSerializer.
    """
    data_type, nullable = unwrap_nullable(data_type)
    if is_user_defined_type(data_type):
        result = '{}.{}Serializer()'
        result = result.format(fmt_class(data_type.namespace.name),
                               fmt_class(data_type.name))
    else:
        # NOTE(review): list/timestamp/else formatting is taken to apply only
        # to the primitive-table branch (indentation was lost in the source);
        # confirm the nesting against the upstream Swift helpers.
        result = _serial_type_table.get(data_type.__class__,
                                        fmt_class(data_type.name))
        if is_list_type(data_type):
            result = result + '({})'.format(fmt_serial_obj(
                data_type.data_type))
        elif is_timestamp_type(data_type):
            result = result + '("{}")'.format(data_type.format)
        else:
            result = 'Serialization._{}'.format(result)
    return result if not nullable else 'NullableSerializer({})'.format(result)
def _determine_validator_type(self, data_type, value):
    """Returns the Swift validator expression for `data_type`, or None when
    the type needs no validation.

    Lists produce arrayValidator(...) when their items need validation;
    numerics produce comparableValidator(...); strings produce
    stringValidator(...). Nullable types wrap the result in
    nullableValidator(...).
    """
    data_type, nullable = unwrap_nullable(data_type)
    if is_list_type(data_type):
        item_validator = self._determine_validator_type(
            data_type.data_type, value)
        if item_validator:
            v = "arrayValidator({})".format(
                self._func_args([
                    ("minItems", data_type.min_items),
                    ("maxItems", data_type.max_items),
                    ("itemValidator", item_validator),
                ]))
        else:
            # Items need no validation, so neither does the list.
            return None
    elif is_numeric_type(data_type):
        v = "comparableValidator({})".format(
            self._func_args([
                ("minValue", data_type.min_value),
                ("maxValue", data_type.max_value),
            ]))
    elif is_string_type(data_type):
        pat = data_type.pattern if data_type.pattern else None
        # Escape the regex for embedding in a Swift string literal; six keeps
        # the bytes/str handling consistent across Python 2 and 3.
        pat = pat.encode('unicode_escape').replace(
            six.ensure_binary("\""), six.ensure_binary("\\\"")) if pat else pat
        v = "stringValidator({})".format(
            self._func_args([
                ("minLength", data_type.min_length),
                ("maxLength", data_type.max_length),
                ("pattern", '"{}"'.format(six.ensure_str(pat)) if pat else None),
            ]))
    else:
        return None
    if nullable:
        v = "nullableValidator({})".format(v)
    return v
def _determine_validator_type(self, data_type, value):
    """Returns the Swift validator expression for `data_type`, or None when
    the type needs no validation.

    Lists produce arrayValidator(...) when their items need validation;
    numerics produce comparableValidator(...); strings produce
    stringValidator(...). Nullable types wrap the result in
    nullableValidator(...).
    """
    data_type, nullable = unwrap_nullable(data_type)
    if is_list_type(data_type):
        item_validator = self._determine_validator_type(data_type.data_type, value)
        if item_validator:
            v = "arrayValidator({})".format(
                self._func_args([
                    ("minItems", data_type.min_items),
                    ("maxItems", data_type.max_items),
                    ("itemValidator", item_validator),
                ])
            )
        else:
            # Items need no validation, so neither does the list.
            return None
    elif is_numeric_type(data_type):
        v = "comparableValidator({})".format(
            self._func_args([
                ("minValue", data_type.min_value),
                ("maxValue", data_type.max_value),
            ])
        )
    elif is_string_type(data_type):
        pat = data_type.pattern if data_type.pattern else None
        if pat:
            # Fix: the original called bytes.replace() with str arguments
            # (`pat.encode('unicode_escape').replace("\"", "\\\"")`), which
            # raises TypeError on Python 3. Decode back to str before
            # escaping embedded quotes (unicode_escape output is ASCII).
            pat = pat.encode('unicode_escape').decode('ascii').replace("\"", "\\\"")
        v = "stringValidator({})".format(
            self._func_args([
                ("minLength", data_type.min_length),
                ("maxLength", data_type.max_length),
                ("pattern", '"{}"'.format(pat) if pat else None),
            ])
        )
    else:
        return None
    if nullable:
        v = "nullableValidator({})".format(v)
    return v
def fmt_type(data_type, namespace=None, use_interface=False, raw=False):
    """Formats a Stone data type as a Go type string. With `raw`, payloads
    that don't need a base type are deferred as json.RawMessage."""
    data_type, nullable = unwrap_nullable(data_type)

    if is_list_type(data_type):
        if raw and not _needs_base_type(data_type.data_type):
            return "json.RawMessage"
        return '[]%s' % fmt_type(data_type.data_type, namespace, use_interface, raw)

    if raw:
        return "json.RawMessage"

    type_name = data_type.name
    needs_base = use_interface and _needs_base_type(data_type)
    if needs_base:
        type_name = 'Is' + type_name
    if (is_composite_type(data_type) and namespace is not None
            and namespace.name != data_type.namespace.name):
        type_name = data_type.namespace.name + '.' + type_name

    if needs_base:
        return _type_table.get(data_type.__class__, type_name)
    if data_type.__class__ not in _type_table:
        # Composite types are referenced through pointers.
        return '*' + type_name
    if data_type.__class__ == Timestamp:
        # Timestamps need a pointer so an unset value can be distinguished.
        # For other primitive types, `omitempty` does the job.
        return ('*' if nullable else '') + _type_table[data_type.__class__]
    return _type_table[data_type.__class__]
def _generate_struct_type(self, struct_type, indent_spaces, extra_parameters):
    """ Generates a TypeScript interface for a stone struct.

    Emits the interface with any `extra_parameters` (name, type, doc) first,
    then one property per field (nullable/defaulted fields become optional).
    For structs in an enumerated-subtypes tree, also emits the polymorphic
    reference interface carrying the '.tag' discriminator.
    """
    namespace = struct_type.namespace
    if struct_type.doc:
        self._emit_tsdoc_header(struct_type.doc)
    parent_type = struct_type.parent_type
    extends_line = ' extends %s' % fmt_type_name(parent_type, namespace) if parent_type else ''
    self.emit('interface %s%s {' % (fmt_type_name(struct_type, namespace), extends_line))
    with self.indent(dent=indent_spaces):
        for param_name, param_type, param_docstring in extra_parameters:
            if param_docstring:
                self._emit_tsdoc_header(param_docstring)
            self.emit('%s: %s;' % (param_name, param_type))
        for field in struct_type.fields:
            doc = field.doc
            field_type, nullable = unwrap_nullable(field.data_type)
            field_ts_type = fmt_type(field_type, namespace)
            optional = nullable or field.has_default
            if field.has_default:
                # doc may be None. If it is not empty, add newlines
                # before appending to it.
                doc = doc + '\n\n' if doc else ''
                # Fix: append the default note to the existing doc instead of
                # overwriting it (the line above was dead otherwise).
                doc = doc + "Defaults to %s." % field.default
            if doc:
                self._emit_tsdoc_header(doc)
            # Translate nullable types into optional properties.
            field_name = '%s?' % field.name if optional else field.name
            self.emit('%s: %s;' % (field_name, field_ts_type))
    self.emit('}')
    self.emit()

    # Some structs can explicitly list their subtypes. These structs have a .tag field that
    # indicate which subtype they are, which is only present when a type reference is
    # ambiguous.
    # Emit a special interface that contains this extra field, and refer to it whenever we
    # encounter a reference to a type with enumerated subtypes.
    if struct_type.is_member_of_enumerated_subtypes_tree():
        if struct_type.has_enumerated_subtypes():
            # This struct is the parent to multiple subtypes. Determine all of the possible
            # values of the .tag property.
            tag_values = []
            for tags, _ in struct_type.get_all_subtypes_with_tags():
                for tag in tags:
                    tag_values.append('"%s"' % tag)
            tag_union = fmt_union(tag_values)
            self._emit_tsdoc_header('Reference to the %s polymorphic type. Contains a .tag '
                                    'property to let you discriminate between possible '
                                    'subtypes.' % fmt_type_name(struct_type, namespace))
            self.emit('interface %s extends %s {' %
                      (fmt_polymorphic_type_reference(struct_type, namespace),
                       fmt_type_name(struct_type, namespace)))
            with self.indent(dent=indent_spaces):
                self._emit_tsdoc_header('Tag identifying the subtype variant.')
                self.emit('\'.tag\': %s;' % tag_union)
            self.emit('}')
            self.emit()
        else:
            # This struct is a particular subtype. Find the applicable .tag value from the
            # parent type, which may be an arbitrary number of steps up the inheritance
            # hierarchy.
            parent = struct_type.parent_type
            while not parent.has_enumerated_subtypes():
                parent = parent.parent_type
            # parent now contains the closest parent type in the inheritance hierarchy that has
            # enumerated subtypes. Determine which subtype this is.
            for subtype in parent.get_enumerated_subtypes():
                if subtype.data_type == struct_type:
                    self._emit_tsdoc_header('Reference to the %s type, identified by the '
                                            'value of the .tag property.' %
                                            fmt_type_name(struct_type, namespace))
                    self.emit('interface %s extends %s {' %
                              (fmt_polymorphic_type_reference(struct_type, namespace),
                               fmt_type_name(struct_type, namespace)))
                    with self.indent(dent=indent_spaces):
                        self._emit_tsdoc_header('Tag identifying this subtype variant. This '
                                                'field is only present when needed to '
                                                'discriminate between multiple possible '
                                                'subtypes.')
                        self.emit_wrapped_text('\'.tag\': \'%s\';' % subtype.name)
                    self.emit('}')
                    self.emit()
                    break
def is_primitive_type(data_type):
    """Whether `data_type`, after unwrapping any nullable wrapper, is one of
    the wrapper primitive classes."""
    unwrapped, _ = unwrap_nullable(data_type)
    return unwrapped.__class__ in _wrapper_primitives
def generate_validator_constructor(ns, data_type):
    """
    Given a Stone data type, returns a string that can be used to construct
    the appropriate validation object in Python.
    """
    dt, nullable_dt = unwrap_nullable(data_type)
    if is_list_type(dt):
        # List validators recurse into the item type.
        v = generate_func_call(
            'bv.List',
            args=[
                generate_validator_constructor(ns, dt.data_type)],
            kwargs=[
                ('min_items', dt.min_items),
                ('max_items', dt.max_items)],
        )
    elif is_map_type(dt):
        v = generate_func_call(
            'bv.Map',
            args=[
                generate_validator_constructor(ns, dt.key_data_type),
                generate_validator_constructor(ns, dt.value_data_type),
            ]
        )
    elif is_numeric_type(dt):
        v = generate_func_call(
            'bv.{}'.format(dt.name),
            kwargs=[
                ('min_value', dt.min_value),
                ('max_value', dt.max_value)],
        )
    elif is_string_type(dt):
        pattern = None
        if dt.pattern is not None:
            pattern = repr(dt.pattern)
        v = generate_func_call(
            'bv.String',
            kwargs=[
                ('min_length', dt.min_length),
                ('max_length', dt.max_length),
                ('pattern', pattern)],
        )
    elif is_timestamp_type(dt):
        v = generate_func_call(
            'bv.Timestamp',
            args=[repr(dt.format)],
        )
    elif is_user_defined_type(dt):
        v = fmt_class(dt.name) + '_validator'
        if ns.name != dt.namespace.name:
            # Qualify validators declared in another namespace's module.
            v = '{}.{}'.format(dt.namespace.name, v)
    elif is_alias(dt):
        # Assume that the alias has already been declared elsewhere.
        name = fmt_class(dt.name) + '_validator'
        if ns.name != dt.namespace.name:
            name = '{}.{}'.format(dt.namespace.name, name)
        v = name
    elif is_boolean_type(dt) or is_bytes_type(dt) or is_void_type(dt):
        v = generate_func_call('bv.{}'.format(dt.name))
    else:
        raise AssertionError('Unsupported data type: %r' % dt)

    if nullable_dt:
        # Nullable wraps the underlying validator.
        return generate_func_call('bv.Nullable', args=[v])
    else:
        return v