def from_json(json, name):
    """Parses 'json' as the schema for the table named 'name'.

    Returns a new TableSchema on success.  Raises ovs.db.error.Error on
    a malformed schema: reserved or invalid column names, an empty
    column set, or a non-positive "maxRows".
    """
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    columnsJson = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    parser.finish()

    if max_rows is None:  # idiom fix: compare to None with 'is'
        # No explicit limit: allow as many rows as can be counted.
        max_rows = sys.maxint
    elif max_rows <= 0:
        raise error.Error("maxRows must be at least 1", json)

    if not columnsJson:
        raise error.Error("table must have at least one column", json)

    columns = {}
    for columnName, columnJson in columnsJson.iteritems():
        if columnName.startswith('_'):
            # Leading-underscore names are reserved for OVSDB itself.
            raise error.Error("names beginning with \"_\" are reserved",
                              json)
        elif not ovs.db.parser.is_identifier(columnName):
            raise error.Error("name must be a valid id", json)
        columns[columnName] = ColumnSchema.from_json(columnJson, columnName)

    return TableSchema(name, columns, mutable, max_rows)
def from_json(json):
    """Converts a JSON value into an AtomicType.

    'json' must be a string naming one of the atomic types; any other
    value raises ovs.db.error.Error.
    """
    if type(json) not in (str, unicode):
        raise error.Error("atomic-type expected", json)
    try:
        atomic = AtomicType.from_string(json)
    except error.Error:
        raise error.Error("\"%s\" is not an atomic-type" % json, json)
    return atomic
def inline_xml_to_nroff(node, font, to_upper=False, newline='\n'):
    """Renders inline XML 'node' as nroff source text.

    'font' is the nroff font-change escape to restore after any font
    change this node introduces.  If 'to_upper' is true, text content
    is upper-cased.  'newline' replaces newline characters in text
    nodes.  Raises error.Error on unsupported elements or node types.
    """
    if node.nodeType == node.TEXT_NODE:
        if to_upper:
            s = text_to_nroff(node.data.upper(), font)
        else:
            s = text_to_nroff(node.data, font)
        return s.replace('\n', newline)
    elif node.nodeType == node.ELEMENT_NODE:
        if node.tagName in ['code', 'em', 'option', 'env', 'b']:
            # Bold elements.
            s = r'\fB'
            for child in node.childNodes:
                s += inline_xml_to_nroff(child, r'\fB', to_upper, newline)
            return s + font
        elif node.tagName == 'ref':
            # Cross-reference, rendered bold; exactly one of several
            # attribute forms selects the text.
            if node.hasAttribute('column'):
                s = node.attributes['column'].nodeValue
                if node.hasAttribute('key'):
                    s += ':' + node.attributes['key'].nodeValue
            elif node.hasAttribute('table'):
                s = node.attributes['table'].nodeValue
            elif node.hasAttribute('group'):
                s = node.attributes['group'].nodeValue
            elif node.hasAttribute('db'):
                s = node.attributes['db'].nodeValue
            elif node.hasAttribute('field'):
                s = node.attributes['field'].nodeValue
            elif node.hasAttribute('section'):
                s = node.attributes['section'].nodeValue
            else:
                raise error.Error("'ref' lacks required attributes: %s"
                                  % list(node.attributes.keys()))
            return r'\fB' + re.sub(r'\s+', ' ', s) + font
        elif node.tagName in ['var', 'dfn', 'i', 'cite']:
            # Italic elements.
            s = r'\fI'
            for child in node.childNodes:
                s += inline_xml_to_nroff(child, r'\fI', to_upper, newline)
            return s + font
        elif node.tagName in ['literal']:
            s = r'\fL'
            for child in node.childNodes:
                # Bug fix: propagate to_upper and newline like every
                # other font-changing branch, instead of silently
                # dropping them for <literal> children.
                s += inline_xml_to_nroff(child, r'\fL', to_upper, newline)
            return s + font
        elif node.tagName == 'url':
            return ('\n.URL "'
                    + text_to_nroff(node.attributes['href'].nodeValue,
                                    escape_dot=False)
                    + '"\n')
        else:
            raise error.Error("element <%s> unknown or invalid here"
                              % node.tagName)
    elif node.nodeType == node.COMMENT_NODE:
        return ''
    else:
        raise error.Error("unknown node %s in inline xml" % node)
def validate(self, category):
    """Checks that 'category' is a well-formed category specification.

    A falsy value is accepted as "no category".  A dict must contain at
    least one of the recognized per-value/follows keys; a string is
    validated via check_category().  Anything else raises error.Error.
    """
    if not category:
        return
    if isinstance(category, dict):
        has_known_key = (OVSDB_CATEGORY_PERVALUE in category
                         or OVSDB_CATEGORY_FOLLOWS in category)
        if not has_known_key:
            raise error.Error('Unknown category object attributes')
    elif isinstance(category, (str, unicode)):
        self.check_category(category)
    else:
        raise error.Error('Unknown category type %s' % type(category))
def from_json(type_, json, symtab=None):
    """Parses 'json' as a datum of the type described by 'type'.  If
    successful, returns a new datum.  On failure, raises an
    ovs.db.error.Error.

    Violations of constraints expressed by 'type' are treated as errors.

    If 'symtab' is nonnull, then named UUIDs in 'symtab' are accepted.
    Refer to RFC 7047 for information about this, and for the syntax
    that this function accepts."""
    is_map = type_.is_map()
    if (is_map or (isinstance(json, list) and len(json) > 0
                   and json[0] == "set")):
        # Collections are wrapped as ["set", [...]] or ["map", [...]].
        if is_map:
            class_ = "map"
        else:
            class_ = "set"

        inner = ovs.db.parser.unwrap_json(json, class_, [list, tuple],
                                          "array")
        n = len(inner)
        if n < type_.n_min or n > type_.n_max:
            raise error.Error("%s must have %d to %d members but %d are "
                              "present" % (class_, type_.n_min,
                                           type_.n_max, n),
                              json)

        values = {}
        for element in inner:
            if is_map:
                # Each map element is a [key, value] pair.
                key, value = ovs.db.parser.parse_json_pair(element)
                keyAtom = Atom.from_json(type_.key, key, symtab)
                valueAtom = Atom.from_json(type_.value, value, symtab)
            else:
                keyAtom = Atom.from_json(type_.key, element, symtab)
                valueAtom = None

            if keyAtom in values:
                if is_map:
                    raise error.Error("map contains duplicate key")
                else:
                    raise error.Error("set contains duplicate")

            values[keyAtom] = valueAtom

        return Datum(type_, values)
    else:
        # A bare scalar is treated as a single-member set.
        keyAtom = Atom.from_json(type_.key, json, symtab)
        return Datum(type_, {keyAtom: None})
def from_python(type_, value, row_to_uuid):
    """Returns a new Datum with the given ovs.db.types.Type 'type_'.

    The new datum's value is taken from 'value', which must take the
    form described as a valid return value from Datum.to_python() for
    'type'.

    Each scalar value within 'value' is initially passed through
    'row_to_uuid', which should convert objects that represent rows
    (if any) into uuid.UUID objects and return other data unchanged.

    Raises ovs.db.error.Error if 'value' is not in an appropriate form
    for 'type_'."""
    d = {}
    if isinstance(value, dict):
        # Map: convert each key/value pair to a pair of atoms.
        for k, v in six.iteritems(value):
            ka = Atom.from_python(type_.key, row_to_uuid(k))
            va = Atom.from_python(type_.value, row_to_uuid(v))
            d[ka] = va
    elif isinstance(value, (list, set, tuple)):
        # Set: members map to None values internally.
        for k in value:
            ka = Atom.from_python(type_.key, row_to_uuid(k))
            d[ka] = None
    else:
        # Scalar: represented as a single-member set.
        ka = Atom.from_python(type_.key, row_to_uuid(value))
        d[ka] = None

    datum = Datum(type_, d)
    datum.check_constraints()
    if not datum.conforms_to_type():
        raise error.Error("%d values when type requires between %d and %d"
                          % (len(d), type_.n_min, type_.n_max))
    return datum
def from_json(json):
    """Parses 'json' as a database schema; returns a new DbSchema.

    Raises ovs.db.error.Error if the schema is malformed (reserved or
    invalid table names, or missing required members)."""
    parser = ovs.db.parser.Parser(json, "database schema")
    name = parser.get("name", ['id'])
    tablesJson = parser.get("tables", [dict])
    parser.finish()

    tables = {}
    for tableName, tableJson in tablesJson.iteritems():
        if tableName.startswith('_'):
            # Leading-underscore names are reserved for OVSDB itself.
            raise error.Error("names beginning with \"_\" are reserved",
                              json)
        elif not ovs.db.parser.is_identifier(tableName):
            raise error.Error("name must be a valid id", json)
        tables[tableName] = TableSchema.from_json(tableJson, tableName)

    return DbSchema(name, tables)
def __n_from_json(json, default):
    """Parses 'json' as a "min" or "max" member of an ovsdb type.

    Returns 'default' when 'json' is None (member absent); otherwise
    'json' must be a nonnegative int no larger than sys.maxint, which
    is returned as-is.  Any other value raises error.Error."""
    if json is None:
        return default
    elif type(json) == int and 0 <= json <= sys.maxint:
        return json
    else:
        raise error.Error("bad min or max value", json)
def from_json(json):
    """Parses 'json' as an ovsdb <type>; returns a new Type.

    'json' may be a bare atomic-type name, or an object with a "key"
    member plus optional "value", "min", and "max" members.  "max" may
    also be the string "unlimited".  Raises error.Error on malformed
    input or a type failing its constraint checks."""
    if type(json) in [str, unicode]:
        return Type(BaseType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    key_json = parser.get("key", [dict, str, unicode])
    value_json = parser.get_optional("value", [dict, str, unicode])
    min_json = parser.get_optional("min", [int])
    max_json = parser.get_optional("max", [int, str, unicode])
    parser.finish()

    key = BaseType.from_json(key_json)
    if value_json:
        value = BaseType.from_json(value_json)
    else:
        value = None

    n_min = Type.__n_from_json(min_json, Type.DEFAULT_MIN)

    if max_json == 'unlimited':
        # "unlimited" maps to the largest representable count.
        n_max = sys.maxint
    else:
        n_max = Type.__n_from_json(max_json, Type.DEFAULT_MAX)

    type_ = Type(key, value, n_min, n_max)
    if not type_.is_valid():
        raise error.Error("ovsdb type fails constraint checks", json)
    return type_
def unwrap_json(json, name, need_type):
    """Undoes a JSON wrapper of the form [name, value].

    'json' must be a two-element list whose first element equals 'name'
    and whose second element has exactly type 'need_type'; that second
    element is returned.  Otherwise error.Error is raised.
    """
    well_formed = (type(json) == list
                   and len(json) == 2
                   and json[0] == name
                   and type(json[1]) == need_type)
    if not well_formed:
        raise error.Error(
            'expected ["%s", <%s>]' % (name, json_type_to_string(need_type)),
            json)
    return json[1]
def escape(match):
    """Translates one matched character (or dash run) into nroff."""
    c = match.group(0)

    # Dashes: in Roman type, "--" in the XML becomes an nroff minus
    # sign, giving authors a way to write minus signs (important in
    # some manpage contexts).  Bold (and literal) conventionally render
    # verbatim text, where nroff uses minus signs for all dashes.
    # A lone dash in Roman type stays a plain hyphen.
    if c.startswith('-'):
        if c == '--' and font == r'\fR':
            return r'\-'
        if c == '-' and font not in (r'\fB', r'\fL'):
            return '-'
        return c.replace('-', r'\-')

    if c == '\\':
        return r'\e'
    elif c == '"':
        return r'\(dq'
    elif c == "'":
        return r'\(cq'
    elif c == ".":
        # groff(7) documents the \. escape, but in practice groff still
        # rejects \. at the beginning of a line, so use the character
        # escape instead.
        return r'\[char46]'
    else:
        raise error.Error("bad escape")
def from_json(json):
    """Parses 'json' as an ovsdb <type>; returns a new Type.

    Accepts either a bare atomic-type name or an object with a "key"
    member and optional "value", "min", and "max" members ("max" may be
    the string "unlimited").  Raises error.Error when the JSON does not
    describe a valid type.
    """
    if isinstance(json, six.string_types):
        return Type(BaseType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    base_type_kinds = list(six.string_types) + [dict]
    key_json = parser.get("key", base_type_kinds)
    value_json = parser.get_optional("value", base_type_kinds)
    min_json = parser.get_optional("min", [int])
    max_json = parser.get_optional("max", list(six.string_types) + [int])
    parser.finish()

    key = BaseType.from_json(key_json)
    value = BaseType.from_json(value_json) if value_json else None

    n_min = Type.__n_from_json(min_json, Type.DEFAULT_MIN)
    if max_json == 'unlimited':
        # "unlimited" maps to the largest representable count.
        n_max = sys.maxsize
    else:
        n_max = Type.__n_from_json(max_json, Type.DEFAULT_MAX)

    type_ = Type(key, value, n_min, n_max)
    if not type_.is_valid():
        raise error.Error("ovsdb type fails constraint checks", json)
    return type_
def __n_from_json(json, default):
    """Parses a "min" or "max" member of an ovsdb type.

    Returns 'default' when 'json' is absent (None).  Otherwise 'json'
    must be an int in [0, sys.maxsize], which is returned unchanged;
    any other value raises error.Error.
    """
    if json is None:
        return default
    if isinstance(json, int) and 0 <= json <= sys.maxsize:
        return json
    raise error.Error("bad min or max value", json)
def __init__(self, table_name, column_name, ovs_base_type,
             is_optional=True, mutable=True, category=None,
             valueMap=None, keyname=None, col_doc=None, group=None,
             relation=OVSDB_SCHEMA_REFERENCE, loadDescription=False):
    """Initializes a reference column: a column whose value refers to
    rows of another table.

    For key-value typed columns (key is not a uuid) the referenced
    table comes from the value part of the pair.  'relation' must be
    one of the values in RELATIONSHIP_MAP; otherwise error.Error is
    raised."""
    super(OVSReference, self).__init__(table_name, column_name,
                                       ovs_base_type, is_optional,
                                       mutable, category, None, valueMap,
                                       keyname, col_doc, group,
                                       loadDescription)
    key_type = ovs_base_type.key

    # Information of the table being referenced
    self.kv_type = False
    if key_type.type != types.UuidType:
        # referenced table name must be in value part of KV pair
        self.kv_type = True
        self.kv_key_type = key_type.type
        key_type = ovs_base_type.value
    self.ref_table = key_type.ref_table_name

    # Overwrite parsed type from parent class processing
    self.type = key_type

    # Relationship of the referenced to the current table
    # one of child, parent or reference
    if relation not in RELATIONSHIP_MAP.values():
        raise error.Error('unknown table relationship %s' % relation)
    else:
        self.relation = relation

    # The number of instances
    self.is_plural = (self.n_max != 1)
def column_set_from_json(json, columns):
    """Parses 'json' as an array of distinct column names drawn from
    'columns' (a dict mapping column name to column object).

    Returns a tuple of the selected column objects, or every column in
    'columns' when 'json' is None.  Raises error.Error for a non-array,
    a non-string or unknown name, or duplicate names.
    """
    if json is None:
        return tuple(columns)
    if not isinstance(json, list):
        raise error.Error("array of distinct column names expected", json)

    for column_name in json:
        if not isinstance(column_name, six.string_types):
            raise error.Error("array of distinct column names expected",
                              json)
        elif column_name not in columns:
            raise error.Error("%s is not a valid column name"
                              % column_name, json)
    if len(set(json)) != len(json):
        # Duplicate.
        raise error.Error("array of distinct column names expected", json)
    return tuple(columns[column_name] for column_name in json)
def from_python(base, value):
    """Creates an Atom of 'base' type from the native Python 'value'.

    'value' is first normalized through ovs.db.parser.float_to_int();
    if its type then does not match 'base', error.Error is raised.
    Constraint violations also raise error.Error.
    """
    value = ovs.db.parser.float_to_int(value)
    if not isinstance(value, base.type.python_types):
        raise error.Error("expected %s, got %s" % (base.type, type(value)))
    atom = Atom(base.type, value)
    atom.check_constraints(base)
    return atom
def column_set_from_json(json, columns):
    """Parses 'json' as an array of distinct column names drawn from
    'columns' (a dict mapping column name to column object).

    Returns a tuple of the named columns, or every column in 'columns'
    when 'json' is None.  Raises error.Error for a non-array, a
    non-string or unknown name, or duplicate names."""
    if json is None:
        return tuple(columns)
    elif type(json) != list:
        raise error.Error("array of distinct column names expected", json)
    else:
        for column_name in json:
            if type(column_name) not in [str, unicode]:
                raise error.Error("array of distinct column names expected",
                                  json)
            elif column_name not in columns:
                raise error.Error(
                    "%s is not a valid column name" % column_name, json)
        if len(set(json)) != len(json):
            # Duplicate.
            raise error.Error("array of distinct column names expected",
                              json)
        return tuple([columns[column_name] for column_name in json])
def from_json(json):
    """Parses 'json' as an ovsdb <base-type>; returns a new BaseType.

    'json' may be a bare atomic-type name (e.g. "integer") or an object
    with a "type" member plus optional type-specific constraint members
    (enum, integer/real bounds, string length bounds, or reference
    information for uuids).  Raises error.Error on malformed input."""
    if isinstance(json, six.string_types):
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    atomic_type = AtomicType.from_json(parser.get("type",
                                                  six.string_types))
    base = BaseType(atomic_type)

    # An "enum" constraint supersedes the per-type range constraints.
    enum = parser.get_optional("enum", [])
    if enum is not None:
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        base.min = parser.get_optional("minInteger", six.integer_types)
        base.max = parser.get_optional("maxInteger", six.integer_types)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        base.min = parser.get_optional("minReal", REAL_PYTHON_TYPES)
        base.max = parser.get_optional("maxReal", REAL_PYTHON_TYPES)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxsize)
        if base.min_length > base.max_length:
            raise error.Error("minLength exceeds maxLength", json)
    elif base.type == UuidType:
        base.ref_table_name = parser.get_optional("refTable", ['id'])
        if base.ref_table_name:
            base.ref_type = parser.get_optional("refType",
                                                six.string_types,
                                                "strong")
            if base.ref_type not in ['strong', 'weak']:
                raise error.Error('refType must be "strong" or "weak" '
                                  '(not "%s")' % base.ref_type)

    parser.finish()
    return base
def __parse_uint(parser, name, default):
    """Parses optional member 'name' from 'parser' as an unsigned
    32-bit integer.

    Returns 'default' when the member is absent.  Raises error.Error
    when the value falls outside [0, 2**32 - 1]."""
    value = parser.get_optional(name, [int, long])
    if value is None:
        value = default
    else:
        max_value = 2**32 - 1
        if not (0 <= value <= max_value):
            raise error.Error(
                "%s out of valid range 0 to %d" % (name, max_value), value)
    return value
def __do_parse_update(self, table_updates):
    """Validates and applies a <table-updates> object received from the
    database server.

    Each member maps a table name to a <table-update>, which in turn
    maps row UUID strings to <row-update> objects containing "old"
    and/or "new" rows.  Increments self.change_seqno once per row that
    __parse_row_update reports as changed.  Raises error.Error on any
    malformed member."""
    if type(table_updates) != dict:
        raise error.Error("<table-updates> is not an object",
                          table_updates)
    for table_name, table_update in table_updates.iteritems():
        table = self.schema.tables.get(table_name)
        if not table:
            raise error.Error("<table-updates> includes unknown "
                              "table \"%s\"" % table_name)

        if type(table_update) != dict:
            raise error.Error(
                "<table-update> for table \"%s\" is not "
                "an object" % table_name, table_update)

        for uuid_string, row_update in table_update.iteritems():
            if not ovs.ovsuuid.UUID.is_valid_string(uuid_string):
                raise error.Error(
                    "<table-update> for table \"%s\" "
                    "contains bad UUID \"%s\" as member "
                    "name" % (table_name, uuid_string),
                    table_update)
            uuid = ovs.ovsuuid.UUID.from_string(uuid_string)

            if type(row_update) != dict:
                raise error.Error("<table-update> for table \"%s\" "
                                  "contains <row-update> for %s that "
                                  "is not an object"
                                  % (table_name, uuid_string))

            old = row_update.get("old", None)
            new = row_update.get("new", None)
            if old is not None and type(old) != dict:
                raise error.Error("\"old\" <row> is not object", old)
            if new is not None and type(new) != dict:
                raise error.Error("\"new\" <row> is not object", new)
            # "old" and "new" must be the only members present.
            if (old is not None) + (new is not None) != len(row_update):
                raise error.Error(
                    "<row-update> contains unexpected "
                    "member", row_update)
            if not old and not new:
                raise error.Error(
                    "<row-update> missing \"old\" and "
                    "\"new\" members", row_update)

            if self.__parse_row_update(table, uuid, old, new):
                self.change_seqno += 1
def from_json(json, name, allow_extensions=False):
    """Parses 'json' as the schema for the table named 'name'; returns
    a new TableSchema.

    When 'allow_extensions' is true, an optional "extensions" object is
    also accepted.  Raises error.Error for malformed schemas: bad
    column names, an empty column set, non-positive "maxRows", empty
    indexes, or indexes over ephemeral columns."""
    parser = ovs.db.parser.Parser(json, "table schema for table %s" % name)
    columns_json = parser.get("columns", [dict])
    mutable = parser.get_optional("mutable", [bool], True)
    max_rows = parser.get_optional("maxRows", [int])
    is_root = parser.get_optional("isRoot", [bool], False)
    indexes_json = parser.get_optional("indexes", [list], [])
    if allow_extensions:
        extensions = parser.get_optional("extensions", [dict], {})
    else:
        extensions = {}
    parser.finish()

    if max_rows is None:
        # No explicit limit.
        max_rows = sys.maxsize
    elif max_rows <= 0:
        raise error.Error("maxRows must be at least 1", json)

    if not columns_json:
        raise error.Error("table must have at least one column", json)

    columns = {}
    for column_name, column_json in six.iteritems(columns_json):
        _check_id(column_name, json)
        columns[column_name] = ColumnSchema.from_json(column_json,
                                                      column_name,
                                                      allow_extensions)

    indexes = []
    for index_json in indexes_json:
        index = column_set_from_json(index_json, columns)
        if not index:
            raise error.Error("index must have at least one column", json)
        elif len(index) == 1:
            # A single-column index acts as a uniqueness constraint on
            # that column.
            index[0].unique = True
        for column in index:
            if not column.persistent:
                raise error.Error("ephemeral columns (such as %s) may "
                                  "not be indexed" % column.name, json)
        indexes.append(index)

    return TableSchema(name, columns, mutable, max_rows, is_root,
                       indexes, extensions)
def inline_xml_to_nroff(node, font, to_upper=False, newline='\n'):
    """Renders inline XML 'node' as nroff source text.

    'font' is the nroff font-change escape to restore after this node.
    If 'to_upper' is true, text content is upper-cased; 'newline'
    replaces newline characters in text nodes.  Raises error.Error for
    unsupported elements or node types."""
    if node.nodeType == node.TEXT_NODE:
        if to_upper:
            s = text_to_nroff(node.data.upper(), font)
        else:
            s = text_to_nroff(node.data, font)
        return s.replace('\n', newline)
    elif node.nodeType == node.ELEMENT_NODE:
        if node.tagName in ['code', 'em', 'option', 'env', 'b']:
            # Bold elements.
            s = r'\fB'
            for child in node.childNodes:
                s += inline_xml_to_nroff(child, r'\fB', to_upper, newline)
            return s + font
        elif node.tagName == 'ref':
            # Cross-reference, rendered bold; exactly one of several
            # attribute forms supplies the text.
            s = r'\fB'
            if node.hasAttribute('column'):
                s += node.attributes['column'].nodeValue
                if node.hasAttribute('key'):
                    s += ':' + node.attributes['key'].nodeValue
            elif node.hasAttribute('table'):
                s += node.attributes['table'].nodeValue
            elif node.hasAttribute('group'):
                s += node.attributes['group'].nodeValue
            elif node.hasAttribute('db'):
                s += node.attributes['db'].nodeValue
            else:
                raise error.Error("'ref' lacks required attributes: %s"
                                  % node.attributes.keys())
            return s + font
        elif node.tagName in ['var', 'dfn', 'i']:
            # Italic elements.
            s = r'\fI'
            for child in node.childNodes:
                s += inline_xml_to_nroff(child, r'\fI', to_upper, newline)
            return s + font
        else:
            raise error.Error("element <%s> unknown or invalid here"
                              % node.tagName)
    elif node.nodeType == node.COMMENT_NODE:
        return ''
    else:
        raise error.Error("unknown node %s in inline xml" % node)
def from_json(json):
    """Parses 'json' as an ovsdb <base-type>; returns a new BaseType.

    'json' may be a bare atomic-type name (e.g. "integer") or an object
    with a "type" member plus optional type-specific constraints.
    Raises error.Error on malformed input.
    """
    # Accept plain str as well as unicode for the shorthand string
    # form, matching the other from_json() parsers in this module
    # (previously only unicode was accepted, so a str shorthand was
    # wrongly handed to Parser and rejected).
    if type(json) in [str, unicode]:
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    atomic_type = AtomicType.from_json(parser.get("type", [str, unicode]))
    base = BaseType(atomic_type)

    # An "enum" constraint supersedes the per-type range constraints.
    enum = parser.get_optional("enum", [])
    if enum is not None:
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        base.min = parser.get_optional("minInteger", [int, long])
        base.max = parser.get_optional("maxInteger", [int, long])
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        base.min = parser.get_optional("minReal", [int, long, float])
        base.max = parser.get_optional("maxReal", [int, long, float])
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxint)
        if base.min_length > base.max_length:
            raise error.Error("minLength exceeds maxLength", json)
    elif base.type == UuidType:
        base.ref_table = parser.get_optional("refTable", ['id'])
        if base.ref_table:
            base.ref_type = parser.get_optional("refType", [str, unicode],
                                                "strong")
            if base.ref_type not in ['strong', 'weak']:
                raise error.Error("refType must be \"strong\" or \"weak\" "
                                  "(not \"%s\")" % base.ref_type)

    parser.finish()
    return base
def main(argv): argv0 = argv[0] # When this is used with Python 3, the program produces no output. if six.PY2: # Make stdout and stderr UTF-8, even if they are redirected to a file. sys.stdout = codecs.getwriter("utf-8")(sys.stdout) sys.stderr = codecs.getwriter("utf-8")(sys.stderr) try: options, args = getopt.gnu_getopt(argv[1:], 'j:', ['multiple', 'json-parser']) except getopt.GetoptError as geo: sys.stderr.write("%s: %s\n" % (argv0, geo.msg)) sys.exit(1) multiple = False ORIG_PARSER = ovs.json.PARSER ovs.json.PARSER = ovs.json.PARSER_PY for key, value in options: if key == '--multiple': multiple = True elif key in ('-j', '--json-parser'): if value == "python": ovs.json.PARSER = ovs.json.PARSER_PY elif value in ('C', 'c'): if ORIG_PARSER != ovs.json.PARSER_C: raise error.Error("C parser selected, but not compiled") else: ovs.json.PARSER = ovs.json.PARSER_C else: sys.stderr.write("%s: unhandled option %s\n" % (argv0, key)) sys.exit(1) if len(args) != 1: sys.stderr.write("usage: %s [--multiple] INPUT.json\n" % argv0) sys.exit(1) input_file = args[0] if input_file == "-": stream = sys.stdin else: stream = open(input_file, "r") if multiple: ok = parse_multiple(stream) else: ok = print_json(ovs.json.from_stream(stream)) if not ok: sys.exit(1)
def from_json(base, json, symtab=None):
    """Parses 'json' as an atom of the ovsdb base type 'base'.

    'json' is first normalized through ovs.db.parser.float_to_int().
    Named UUIDs are resolved through 'symtab' when given.  Raises
    error.Error when the value does not match the base type or violates
    its constraints."""
    type_ = base.type
    json = ovs.db.parser.float_to_int(json)
    if ((type_ == ovs.db.types.IntegerType and type(json) in [int, long])
            or (type_ == ovs.db.types.RealType
                and type(json) in [int, long, float])
            or (type_ == ovs.db.types.BooleanType and type(json) == bool)
            or (type_ == ovs.db.types.StringType
                and type(json) in [str, unicode])):
        atom = Atom(type_, json)
    elif type_ == ovs.db.types.UuidType:
        atom = Atom(type_, ovs.ovsuuid.UUID.from_json(json, symtab))
    else:
        raise error.Error("expected %s" % type_.to_string(), json)
    atom.check_constraints(base)
    return atom
def from_json(json, symtab=None):
    """Parses 'json' as either a ["uuid", <string>] or a
    ["named-uuid", <name>] wrapper.

    Returns a UUID for a valid "uuid" pair.  Otherwise, when 'symtab'
    is provided, a "named-uuid" is resolved through it, minting a fresh
    UUID for names not yet seen.  Raises error.Error when neither form
    matches (or when a named-uuid arrives without a symbol table).
    """
    try:
        s = ovs.db.parser.unwrap_json(json, "uuid", unicode)
        if not UUID.uuidRE.match(s):
            raise error.Error("\"%s\" is not a valid UUID" % s, json)
        return UUID(s)
    except error.Error as e:  # modern syntax, valid on Python >= 2.6
        # Bug fix: without a symbol table a named-uuid cannot be
        # resolved, and "name not in symtab" below would raise
        # TypeError on None; re-raise the original parse error instead
        # (mirrors the six-based version of this function).
        if not symtab:
            raise e
        try:
            name = ovs.db.parser.unwrap_json(json, "named-uuid", unicode)
        except error.Error:
            raise e
        if name not in symtab:
            symtab[name] = uuid4()
        return symtab[name]
def from_json(_json, loadDescription): parser = ovs.db.parser.Parser(_json, 'extended OVSDB schema') # These are not used (yet), but the parser fails if they are not parsed parser.get_optional('$schema', [str, unicode]) parser.get_optional('id', [str, unicode]) name = parser.get('name', ['id']) version = parser.get_optional('version', [str, unicode]) tablesJson = parser.get('tables', [dict]) doc = None # Though these will not be used if documentation is not # loaded, they have to be parsed or OVS' Parser will fail _doc = parser.get_optional('doc', [list]) if loadDescription: if _doc: doc = ' '.join(_doc) parser.finish() if (version is not None and not re.match('[0-9]+\.[0-9]+\.[0-9]+$', version)): raise error.Error('schema version "%s" not in format x.y.z' % version) tables = {} for tableName, tableJson in tablesJson.iteritems(): tables[tableName] = OVSTable.from_json(tableJson, tableName, loadDescription) # Backfill the parent/child relationship info, mostly for # parent pointers which cannot be handled in place. for tableName, table in tables.iteritems(): for columnName, column in table.references.iteritems(): if column.relation == 'child': table.children.append(columnName) if tables[column.ref_table].parent is None: tables[column.ref_table].parent = tableName elif column.relation == 'parent': if tableName not in tables[column.ref_table].children: tables[column.ref_table].children.append(tableName) table.parent = column.ref_table return RESTSchema(name, version, tables, doc)
def from_json(json, symtab=None):
    """Parses 'json' as a ["uuid", <string>] pair; returns a uuid.UUID.

    Falls back to ["named-uuid", <name>] resolution through 'symtab'
    when one is supplied, minting a new random UUID for names not yet
    seen.  Raises error.Error if neither form parses.
    """
    try:
        s = ovs.db.parser.unwrap_json(json, "uuid", [str, unicode],
                                      "string")
        if not uuidRE.match(s):
            raise error.Error("\"%s\" is not a valid UUID" % s, json)
        return uuid.UUID(s)
    except error.Error as e:
        # A named-uuid is only meaningful when a symbol table exists.
        if not symtab:
            raise e
        try:
            name = ovs.db.parser.unwrap_json(json, "named-uuid",
                                             [str, unicode], "string")
        except error.Error:
            # Neither form matched; report the original "uuid" error.
            raise e
        if name not in symtab:
            symtab[name] = uuid.uuid4()
        return symtab[name]
def from_json(base, json, symtab=None):
    """Parses 'json' as an atom of the ovsdb base type 'base'.

    'json' is first normalized through ovs.db.parser.float_to_int().
    Named UUIDs are resolved through 'symtab' when given.  Raises
    error.Error when the value does not match the base type or violates
    its constraints.
    """
    type_ = base.type
    json = ovs.db.parser.float_to_int(json)

    if type_ == ovs.db.types.UuidType:
        atom = Atom(type_, ovs.ovsuuid.from_json(json, symtab))
    else:
        matches = ((type_ == ovs.db.types.IntegerType
                    and isinstance(json, int))
                   or (type_ == ovs.db.types.RealType
                       and isinstance(json, (int, float)))
                   or (type_ == ovs.db.types.BooleanType
                       and isinstance(json, bool))
                   or (type_ == ovs.db.types.StringType
                       and isinstance(json, str)))
        if not matches:
            raise error.Error("expected %s" % type_.to_string(), json)
        atom = Atom(type_, json)

    atom.check_constraints(base)
    return atom
def from_json(json):
    """Parses 'json' as a database schema; returns a new DbSchema.

    The optional "version" member must have the form x.y.z; "cksum" is
    parsed but discarded.  Raises error.Error on malformed input."""
    parser = ovs.db.parser.Parser(json, "database schema")
    name = parser.get("name", ['id'])
    version = parser.get_optional("version", [str, unicode])
    parser.get_optional("cksum", [str, unicode])
    tablesJson = parser.get("tables", [dict])
    parser.finish()

    if (version is not None
            and not re.match('[0-9]+\.[0-9]+\.[0-9]+$', version)):
        raise error.Error('schema version "%s" not in format x.y.z'
                          % version)

    tables = {}
    for tableName, tableJson in tablesJson.iteritems():
        _check_id(tableName, json)
        tables[tableName] = TableSchema.from_json(tableJson, tableName)

    return DbSchema(name, version, tables)