def from_json(json):
    """Parse an IDL schema from JSON, returning an IdlSchema.

    The JSON must contain "idlPrefix" and "idlHeader" string members in
    addition to an ordinary database schema; those two members are
    stripped before the remainder is parsed as a DbSchema.

    Raises error.Error if the JSON is malformed.
    """
    parser = ovs.db.parser.Parser(json, "IDL schema")
    # six.string_types covers str and (on Python 2) unicode; the bare
    # name "unicode" no longer exists on Python 3.
    idlPrefix = parser.get("idlPrefix", six.string_types)
    idlHeader = parser.get("idlHeader", six.string_types)

    # Copy so the caller's dict is not mutated, then drop the IDL-only
    # members before handing the rest to the DbSchema parser.
    subjson = dict(json)
    del subjson["idlPrefix"]
    del subjson["idlHeader"]
    schema = DbSchema.from_json(subjson)

    return IdlSchema(schema.name, schema.tables, idlPrefix, idlHeader)
def from_json(json):
    """Construct an IdlSchema from an "IDL schema" JSON object.

    Strips the IDL-specific "idlPrefix" and "idlHeader" members and
    parses whatever remains as a regular database schema.
    """
    parser = ovs.db.parser.Parser(json, "IDL schema")
    prefix = parser.get("idlPrefix", six.string_types)
    header = parser.get("idlHeader", six.string_types)

    # Work on a copy so the caller's dict is left untouched.
    remainder = dict(json)
    for member in ("idlPrefix", "idlHeader"):
        del remainder[member]

    db_schema = DbSchema.from_json(remainder)
    return IdlSchema(db_schema.name, db_schema.version, db_schema.tables,
                     prefix, header)
def from_json(json):
    """Parse an IDL schema from JSON, returning an IdlSchema.

    Requires "idlPrefix" and "idlHeader" string members and accepts an
    optional "idlTableAliases" list; all three are removed before the
    remainder is parsed as a DbSchema.

    Raises error.Error if the JSON is malformed.
    """
    parser = ovs.db.parser.Parser(json, "IDL schema")
    # six.string_types replaces [str, unicode]: "unicode" does not
    # exist on Python 3.
    idlPrefix = parser.get("idlPrefix", six.string_types)
    idlHeader = parser.get("idlHeader", six.string_types)
    idlTableAliases = parser.get_optional("idlTableAliases", [list])

    # Copy so the caller's dict is not mutated.
    subjson = dict(json)
    del subjson["idlPrefix"]
    del subjson["idlHeader"]
    if idlTableAliases:
        del subjson["idlTableAliases"]
    schema = DbSchema.from_json(subjson)

    return IdlSchema(schema.name, schema.version, schema.tables,
                     idlPrefix, idlHeader, idlTableAliases)
def from_json(json):
    """Parse an ovsdb type from JSON.

    A bare string is shorthand for an atomic key type; otherwise the
    JSON must be an object with a "key" member and optional "value",
    "min" and "max" members.

    Raises error.Error if the JSON is malformed or the resulting type
    fails its constraint checks.
    """
    # isinstance() with six.string_types replaces "type(json) in
    # [str, unicode]": "unicode" does not exist on Python 3.
    if isinstance(json, six.string_types):
        return Type(BaseType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    dict_or_string = [dict] + list(six.string_types)
    key_json = parser.get("key", dict_or_string)
    value_json = parser.get_optional("value", dict_or_string)
    min_json = parser.get_optional("min", [int])
    max_json = parser.get_optional("max", [int] + list(six.string_types))
    parser.finish()

    key = BaseType.from_json(key_json)
    if value_json:
        value = BaseType.from_json(value_json)
    else:
        value = None

    n_min = Type.__n_from_json(min_json, Type.DEFAULT_MIN)
    if max_json == 'unlimited':
        # sys.maxint is gone on Python 3; maxsize is the equivalent cap.
        n_max = sys.maxsize
    else:
        n_max = Type.__n_from_json(max_json, Type.DEFAULT_MAX)

    type_ = Type(key, value, n_min, n_max)
    if not type_.is_valid():
        raise error.Error("ovsdb type fails constraint checks", json)
    return type_
def from_json(json):
    """Build an IdlSchema from JSON that may carry C declaration blobs.

    "cDecls" and "hDecls" default to empty strings when absent; the
    remaining members are parsed as an extension-enabled DbSchema.
    """
    parser = ovs.db.parser.Parser(json, "IDL schema")
    prefix = parser.get("idlPrefix", six.string_types)
    header = parser.get("idlHeader", six.string_types)
    c_decls = parser.get_optional("cDecls", six.string_types, "")
    h_decls = parser.get_optional("hDecls", six.string_types, "")

    # Strip the IDL-only members from a copy before DbSchema parsing.
    remainder = dict(json)
    del remainder["idlPrefix"]
    del remainder["idlHeader"]
    remainder.pop("cDecls", None)
    remainder.pop("hDecls", None)

    db = DbSchema.from_json(remainder, allow_extensions=True)
    return IdlSchema(db.name, db.version, db.tables, prefix, header,
                     c_decls, h_decls)
def from_json(json, name):
    """Parse the schema of the table called `name`, returning a
    TableSchema.

    Raises error.Error on malformed input, a non-positive "maxRows", or
    a table with no columns.
    """
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    columnsJson = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    parser.finish()

    if max_rows is None:
        # No explicit limit: effectively unbounded.  (sys.maxint does
        # not exist on Python 3; maxsize is the portable equivalent.)
        max_rows = sys.maxsize
    elif max_rows <= 0:
        raise error.Error("maxRows must be at least 1", json)

    if not columnsJson:
        raise error.Error("table must have at least one column", json)

    columns = {}
    # .items() works on both Python 2 and 3; dict.iteritems() is gone
    # on Python 3.
    for columnName, columnJson in columnsJson.items():
        if columnName.startswith('_'):
            raise error.Error("names beginning with \"_\" are reserved",
                              json)
        elif not ovs.db.parser.is_identifier(columnName):
            raise error.Error("name must be a valid id", json)
        columns[columnName] = ColumnSchema.from_json(
            columnJson, columnName)

    return TableSchema(name, columns, mutable, max_rows)
def from_json(json):
    """Parse a database schema from JSON, returning a DbSchema.

    Raises error.Error for reserved ("_"-prefixed) or invalid table
    names.
    """
    parser = ovs.db.parser.Parser(json, "database schema")
    name = parser.get("name", ['id'])
    tablesJson = parser.get("tables", [dict])
    parser.finish()

    tables = {}
    # .items() works on both Python 2 and 3; dict.iteritems() is gone
    # on Python 3.
    for tableName, tableJson in tablesJson.items():
        if tableName.startswith('_'):
            raise error.Error("names beginning with \"_\" are reserved",
                              json)
        elif not ovs.db.parser.is_identifier(tableName):
            raise error.Error("name must be a valid id", json)
        tables[tableName] = TableSchema.from_json(tableJson, tableName)

    return DbSchema(name, tables)
def from_json(json):
    """Parse an ovsdb type from JSON.

    A plain string is shorthand for an atomic key type; otherwise the
    JSON must be an object with a "key" member and optional "value",
    "min" and "max" members.

    Raises error.Error if the JSON is malformed or the resulting type
    fails its constraint checks.
    """
    # isinstance() with six.string_types replaces the Python-2-only
    # "type(json) in [str, unicode]" check.
    if isinstance(json, six.string_types):
        return Type(BaseType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    dict_or_string = [dict] + list(six.string_types)
    key_json = parser.get("key", dict_or_string)
    value_json = parser.get_optional("value", dict_or_string)
    min_json = parser.get_optional("min", [int])
    max_json = parser.get_optional("max", [int] + list(six.string_types))
    parser.finish()

    key = BaseType.from_json(key_json)
    if value_json:
        value = BaseType.from_json(value_json)
    else:
        value = None

    n_min = Type.__n_from_json(min_json, Type.DEFAULT_MIN)
    if max_json == "unlimited":
        # sys.maxint does not exist on Python 3; use maxsize instead.
        n_max = sys.maxsize
    else:
        n_max = Type.__n_from_json(max_json, Type.DEFAULT_MAX)

    type_ = Type(key, value, n_min, n_max)
    if not type_.is_valid():
        raise error.Error("ovsdb type fails constraint checks", json)
    return type_
def from_json(json):
    """Parse an ovsdb type from its JSON representation.

    A plain string is shorthand for an atomic key type; otherwise the
    JSON must be an object with a "key" member and optional
    "value"/"min"/"max" members.
    """
    if isinstance(json, six.string_types):
        return Type(BaseType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    string_or_dict = list(six.string_types)
    string_or_dict.extend([dict])
    key_json = parser.get("key", string_or_dict)
    value_json = parser.get_optional("value", string_or_dict)
    min_json = parser.get_optional("min", [int])
    string_or_int = list(six.string_types)
    string_or_int.extend([int])
    max_json = parser.get_optional("max", string_or_int)
    parser.finish()

    key = BaseType.from_json(key_json)
    value = BaseType.from_json(value_json) if value_json else None

    n_min = Type.__n_from_json(min_json, Type.DEFAULT_MIN)
    # "max" may be the literal string "unlimited" instead of a number.
    n_max = (sys.maxsize if max_json == 'unlimited'
             else Type.__n_from_json(max_json, Type.DEFAULT_MAX))

    result = Type(key, value, n_min, n_max)
    if not result.is_valid():
        raise error.Error("ovsdb type fails constraint checks", json)
    return result
def from_json(json, name):
    """Parse the schema of the table called `name`, returning a
    TableSchema.

    Raises error.Error on malformed input, a non-positive "maxRows", or
    a table with no columns.
    """
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    columnsJson = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    parser.finish()

    if max_rows is None:
        # No explicit limit: effectively unbounded.  (sys.maxint is
        # gone on Python 3; maxsize is the portable equivalent.)
        max_rows = sys.maxsize
    elif max_rows <= 0:
        raise error.Error("maxRows must be at least 1", json)

    if not columnsJson:
        raise error.Error("table must have at least one column", json)

    columns = {}
    # .items() works on both Python 2 and 3; dict.iteritems() is gone
    # on Python 3.
    for columnName, columnJson in columnsJson.items():
        if columnName.startswith('_'):
            raise error.Error("names beginning with \"_\" are reserved",
                              json)
        elif not ovs.db.parser.is_identifier(columnName):
            raise error.Error("name must be a valid id", json)
        columns[columnName] = ColumnSchema.from_json(columnJson, columnName)

    return TableSchema(name, columns, mutable, max_rows)
def from_json(json, name):
    """Parse the schema of the column called `name`, returning a
    ColumnSchema.
    """
    parser = ovs.db.parser.Parser(json, "schema for column %s" % name)
    mutable = parser.get_optional("mutable", [bool], True)
    ephemeral = parser.get_optional("ephemeral", [bool], False)
    # "type_" avoids shadowing the builtin "type"; accept any string
    # type for the shorthand atomic form ("unicode" is gone on
    # Python 3, so six.string_types is used instead).
    type_ = types.Type.from_json(
        parser.get("type", [dict] + list(six.string_types)))
    parser.finish()
    # A column is persistent unless marked ephemeral.
    return ColumnSchema(name, mutable, not ephemeral, type_)
def from_json(json, name):
    """Parse the JSON schema for column `name` into a ColumnSchema."""
    parser = ovs.db.parser.Parser(json, "schema for column %s" % name)
    mutable = parser.get_optional("mutable", [bool], True)
    ephemeral = parser.get_optional("ephemeral", [bool], False)
    # six.string_types replaces [str, unicode]: "unicode" does not
    # exist on Python 3.
    type_ = types.Type.from_json(
        parser.get("type", [dict] + list(six.string_types)))
    parser.finish()
    # A column is persistent unless marked ephemeral.
    return ColumnSchema(name, mutable, not ephemeral, type_)
def from_json(json):
    """Parse a database schema from JSON, returning a DbSchema.

    Accepts optional "version" and "cksum" members; raises error.Error
    if "version" is present but not in "x.y.z" format.
    """
    parser = ovs.db.parser.Parser(json, "database schema")
    name = parser.get("name", ['id'])
    # six.string_types replaces [str, unicode]: "unicode" is gone on
    # Python 3.
    version = parser.get_optional("version", six.string_types)
    # The checksum is consumed so parser.finish() accepts it, but it is
    # not validated here.
    parser.get_optional("cksum", six.string_types)
    tablesJson = parser.get("tables", [dict])
    parser.finish()

    # Raw string so "\." is a regex escape for a literal dot, not a
    # (deprecated) string escape sequence.
    if (version is not None
            and not re.match(r'[0-9]+\.[0-9]+\.[0-9]+$', version)):
        raise error.Error('schema version "%s" not in format x.y.z'
                          % version)

    tables = {}
    # .items() works on both Python 2 and 3; dict.iteritems() is gone
    # on Python 3.
    for tableName, tableJson in tablesJson.items():
        _check_id(tableName, json)
        tables[tableName] = TableSchema.from_json(tableJson, tableName)

    return DbSchema(name, version, tables)
def from_json(json):
    """Parse a database schema from JSON, returning a DbSchema.

    Accepts optional "version" and "cksum" members; raises error.Error
    if "version" is present but not in "x.y.z" format.
    """
    parser = ovs.db.parser.Parser(json, "database schema")
    name = parser.get("name", ['id'])
    version = parser.get_optional("version", six.string_types)
    # Consumed so parser.finish() accepts it; not validated here.
    parser.get_optional("cksum", six.string_types)
    tablesJson = parser.get("tables", [dict])
    parser.finish()

    # Raw string so "\." is a regex escape for a literal dot rather
    # than an invalid string escape (a DeprecationWarning on Python 3).
    if (version is not None
            and not re.match(r'[0-9]+\.[0-9]+\.[0-9]+$', version)):
        raise error.Error('schema version "%s" not in format x.y.z'
                          % version)

    tables = {}
    for tableName, tableJson in six.iteritems(tablesJson):
        _check_id(tableName, json)
        tables[tableName] = TableSchema.from_json(tableJson, tableName)

    return DbSchema(name, version, tables)
def from_json(json, name):
    """Parse the schema of column `name`, returning a ColumnSchema."""
    parser = ovs.db.parser.Parser(json, "schema for column %s" % name)
    is_mutable = parser.get_optional("mutable", [bool], True)
    is_ephemeral = parser.get_optional("ephemeral", [bool], False)
    # The "type" member may be a shorthand string or a full object.
    accepted = list(six.string_types)
    accepted.extend([dict])
    column_type = ovs.db.types.Type.from_json(parser.get("type", accepted))
    parser.finish()
    # A column is persistent unless declared ephemeral.
    return ColumnSchema(name, is_mutable, not is_ephemeral, column_type)
def from_json(json, name):
    """Parse the schema of column `name`, returning a ColumnSchema.

    A column that holds weak references is forced to be mutable: when
    referenced rows are deleted the weak reference must change.
    """
    parser = ovs.db.parser.Parser(json, "schema for column %s" % name)
    is_mutable = parser.get_optional("mutable", [bool], True)
    is_ephemeral = parser.get_optional("ephemeral", [bool], False)
    # The "type" member may be a shorthand string or a full object.
    accepted = list(six.string_types)
    accepted.extend([dict])
    column_type = ovs.db.types.Type.from_json(parser.get("type", accepted))
    parser.finish()

    if not is_mutable and (column_type.key.is_weak_ref()
                           or (column_type.value
                               and column_type.value.is_weak_ref())):
        # An immutable weak reference would be contradictory, so
        # override the declared immutability.
        is_mutable = True

    return ColumnSchema(name, is_mutable, not is_ephemeral, column_type)
def from_json(json):
    """Parse a BaseType from JSON.

    The JSON is either a bare atomic-type name or an object with a
    "type" member plus constraint members that depend on the atomic
    type (integer/real ranges, string lengths, or uuid references).
    """
    if isinstance(json, six.string_types):
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    base = BaseType(
        AtomicType.from_json(parser.get("type", six.string_types)))

    enum = parser.get_optional("enum", [])
    if enum is not None:
        # An enum restricts the column to the listed values.
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        base.min = parser.get_optional("minInteger", six.integer_types)
        base.max = parser.get_optional("maxInteger", six.integer_types)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        base.min = parser.get_optional("minReal", REAL_PYTHON_TYPES)
        base.max = parser.get_optional("maxReal", REAL_PYTHON_TYPES)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxsize)
        if base.min_length > base.max_length:
            raise error.Error("minLength exceeds maxLength", json)
    elif base.type == UuidType:
        base.ref_table_name = parser.get_optional("refTable", ['id'])
        if base.ref_table_name:
            base.ref_type = parser.get_optional("refType",
                                                six.string_types,
                                                "strong")
            if base.ref_type not in ('strong', 'weak'):
                raise error.Error('refType must be "strong" or "weak" '
                                  '(not "%s")' % base.ref_type)

    parser.finish()
    return base
def from_json(json, name, allow_extensions=False):
    """Parse the schema of table `name` into a TableSchema.

    When `allow_extensions` is true an optional "extensions" object is
    accepted and forwarded to the TableSchema and its columns.
    """
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    cols_json = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    is_root = parser.get_optional("isRoot", [bool], False)
    idx_json = parser.get_optional("indexes", [list], [])
    if allow_extensions:
        extensions = parser.get_optional("extensions", [dict], {})
    else:
        extensions = {}
    parser.finish()

    if max_rows is None:
        max_rows = sys.maxsize          # no explicit row limit
    elif max_rows <= 0:
        raise error.Error("maxRows must be at least 1", json)

    if not cols_json:
        raise error.Error("table must have at least one column", json)

    columns = {}
    for col_name, col_json in six.iteritems(cols_json):
        _check_id(col_name, json)
        columns[col_name] = ColumnSchema.from_json(col_json, col_name,
                                                   allow_extensions)

    indexes = []
    for one_index in idx_json:
        index = column_set_from_json(one_index, columns)
        if not index:
            raise error.Error("index must have at least one column", json)
        elif len(index) == 1:
            # A one-column index is just a uniqueness constraint.
            index[0].unique = True
        for column in index:
            if not column.persistent:
                raise error.Error("ephemeral columns (such as %s) may "
                                  "not be indexed" % column.name, json)
        indexes.append(index)

    return TableSchema(name, columns, mutable, max_rows, is_root, indexes,
                       extensions)
def from_json(json, name):
    """Parse the schema of table `name` into a TableSchema.

    Column-parsing errors are re-raised with the table name prefixed so
    the source of the failure is easier to locate.
    """
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    cols_json = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    is_root = parser.get_optional("isRoot", [bool], False)
    idx_json = parser.get_optional("indexes", [list], [])
    parser.finish()

    if max_rows is None:
        max_rows = sys.maxsize          # unbounded by default
    elif max_rows <= 0:
        raise error.Error('Table - "%s": maxRows must be at least 1'
                          % name, json)

    if not cols_json:
        raise error.Error('table "%s" must have at least one column'
                          % name, json)

    columns = {}
    for col_name, col_json in six.iteritems(cols_json):
        _check_id(col_name, json)
        try:
            columns[col_name] = ColumnSchema.from_json(col_json, col_name)
        except error.Error as err:
            # Prefix the table name for easier diagnosis.
            raise error.Error('Table - "%s": %s' % (name, err.msg),
                              err.json)

    indexes = []
    for one_index in idx_json:
        index = column_set_from_json(one_index, columns)
        if not index:
            raise error.Error("index must have at least one column", json)
        elif len(index) == 1:
            # A one-column index is just a uniqueness constraint.
            index[0].unique = True
        for column in index:
            if not column.persistent:
                raise error.Error("ephemeral columns (such as %s) may "
                                  "not be indexed" % column.name, json)
        indexes.append(index)

    return TableSchema(name, columns, mutable, max_rows, is_root, indexes)
def from_json(json):
    """Parse a BaseType from JSON.

    The JSON is either a bare atomic-type name or an object with a
    "type" member plus constraint members that depend on the atomic
    type (integer/real ranges, string lengths, or uuid references).

    Raises error.Error on malformed or inconsistent constraints.
    """
    # isinstance() with six.string_types replaces the Python-2-only
    # "type(json) == unicode" check (and also accepts plain str).
    if isinstance(json, six.string_types):
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    atomic_type = AtomicType.from_json(parser.get("type",
                                                  six.string_types))

    base = BaseType(atomic_type)

    enum = parser.get_optional("enum", [])
    if enum is not None:
        # An enum restricts the column to the listed values.
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        # "long" no longer exists on Python 3; six.integer_types covers
        # int and (on Python 2) long.
        base.min = parser.get_optional("minInteger", six.integer_types)
        base.max = parser.get_optional("maxInteger", six.integer_types)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        real_types = list(six.integer_types) + [float]
        base.min = parser.get_optional("minReal", real_types)
        base.max = parser.get_optional("maxReal", real_types)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        # sys.maxint is gone on Python 3; maxsize is the equivalent cap.
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxsize)
        if base.min_length > base.max_length:
            raise error.Error("minLength exceeds maxLength", json)
    elif base.type == UuidType:
        base.ref_table = parser.get_optional("refTable", ['id'])
        if base.ref_table:
            base.ref_type = parser.get_optional("refType",
                                                six.string_types,
                                                "strong")
            if base.ref_type not in ['strong', 'weak']:
                raise error.Error("refType must be \"strong\" or \"weak\" "
                                  "(not \"%s\")" % base.ref_type)

    parser.finish()
    return base