Example No. 1
    def read_type(cls, f, user_type_map):
        optid = read_short(f)
        try:
            typeclass = cls.type_codes[optid]
        except KeyError:
            raise NotSupportedError(
                "Unknown data type code 0x%04x. Have to skip"
                " entire result set." % (optid, ))
        if typeclass in (ListType, SetType):
            subtype = cls.read_type(f, user_type_map)
            typeclass = typeclass.apply_parameters((subtype, ))
        elif typeclass == MapType:
            keysubtype = cls.read_type(f, user_type_map)
            valsubtype = cls.read_type(f, user_type_map)
            typeclass = typeclass.apply_parameters((keysubtype, valsubtype))
        elif typeclass == TupleType:
            num_items = read_short(f)
            types = tuple(
                cls.read_type(f, user_type_map) for _ in range(num_items))
            typeclass = typeclass.apply_parameters(types)
        elif typeclass == UserType:
            ks = read_string(f)
            udt_name = read_string(f)
            num_fields = read_short(f)
            names_and_types = tuple(
                (read_string(f), cls.read_type(f, user_type_map))
                for _ in range(num_fields))
            mapped_class = user_type_map.get(ks, {}).get(udt_name)
            typeclass = typeclass.make_udt_class(ks, udt_name, names_and_types,
                                                 mapped_class)
        elif typeclass == CUSTOM_TYPE:
            classname = read_string(f)
            typeclass = lookup_casstype(classname)

        return typeclass
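
Both the collection branches and the CUSTOM_TYPE fallback ultimately resolve to classes from cassandra.cqltypes, and lookup_casstype accepts either a bare class name or a fully parameterized type string. A minimal sketch of the lookups this method relies on, assuming only the public cassandra.cqltypes module:

    from cassandra.cqltypes import lookup_casstype

    # Bare class names resolve to the registered type class.
    int_type = lookup_casstype('Int32Type')

    # Parameterized strings are parsed recursively, much like the
    # ListType/MapType/TupleType branches above.
    map_type = lookup_casstype('MapType(UTF8Type, Int32Type)')
    print(map_type.cql_parameterized_type())  # expected: map<text, int>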
Example No. 2
    def read_type(cls, f, user_type_map):
        optid = read_short(f)
        try:
            typeclass = cls.type_codes[optid]
        except KeyError:
            raise NotSupportedError("Unknown data type code 0x%04x. Have to skip"
                                    " entire result set." % (optid,))
        if typeclass in (ListType, SetType):
            subtype = cls.read_type(f, user_type_map)
            typeclass = typeclass.apply_parameters((subtype,))
        elif typeclass == MapType:
            keysubtype = cls.read_type(f, user_type_map)
            valsubtype = cls.read_type(f, user_type_map)
            typeclass = typeclass.apply_parameters((keysubtype, valsubtype))
        elif typeclass == TupleType:
            num_items = read_short(f)
            types = tuple(cls.read_type(f, user_type_map) for _ in range(num_items))
            typeclass = typeclass.apply_parameters(types)
        elif typeclass == UserType:
            ks = read_string(f)
            udt_name = read_string(f)
            num_fields = read_short(f)
            names, types = zip(*((read_string(f), cls.read_type(f, user_type_map))
                                 for _ in range(num_fields)))
            specialized_type = typeclass.make_udt_class(ks, udt_name, names, types)
            specialized_type.mapped_class = user_type_map.get(ks, {}).get(udt_name)
            typeclass = specialized_type
        elif typeclass == CUSTOM_TYPE:
            classname = read_string(f)
            typeclass = lookup_casstype(classname)

        return typeclass
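
The only difference from Example No. 1 is how the optional mapped class is attached to the specialized UDT type; in both variants user_type_map is expected to be a nested dict keyed by keyspace and then UDT name. A hypothetical illustration of that shape (my_ks, address, and Address are made-up names):

    # Hypothetical mapping; Address is any user-supplied class that should
    # receive deserialized UDT values.
    class Address(object):
        def __init__(self, street=None, zipcode=None):
            self.street = street
            self.zipcode = zipcode

    user_type_map = {'my_ks': {'address': Address}}
    mapped_class = user_type_map.get('my_ks', {}).get('address')  # -> Address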
Example No. 3
 def test_marshal_platform(self):
     for proto_ver in protocol_versions:
         for geo in self.samples:
             cql_type = lookup_casstype(geo.__class__.__name__ + 'Type')
             self.assertEqual(
                 cql_type.from_binary(cql_type.to_binary(geo, proto_ver),
                                      proto_ver), geo)
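
The same round-trip check applies to any looked-up type, not only the geometry samples; a self-contained sketch with a scalar type (so it runs without the geometry dependencies) might look like this:

    from cassandra.cqltypes import lookup_casstype

    cql_type = lookup_casstype('Int32Type')
    protocol_version = 3  # arbitrary supported version for this sketch
    data = cql_type.to_binary(42, protocol_version)
    assert cql_type.from_binary(data, protocol_version) == 42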
Example No. 4
 def _build_column_metadata(self, table_metadata, row):
     name = row["column_name"]
     data_type = types.lookup_casstype(row["validator"])
     column_meta = ColumnMetadata(table_metadata, name, data_type)
     index_meta = self._build_index_metadata(column_meta, row)
     column_meta.index = index_meta
     return column_meta
Example No. 6
 def _build_column_metadata(self, table_metadata, row):
     name = row["column_name"]
     data_type = types.lookup_casstype(row["validator"])
     is_static = row.get("type", None) == "static"
     column_meta = ColumnMetadata(table_metadata, name, data_type, is_static=is_static)
     index_meta = self._build_index_metadata(column_meta, row)
     column_meta.index = index_meta
     return column_meta
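
In these schema-parsing examples the validator column carries a Cassandra type string; lookup_casstype should also accept the fully qualified marshal class names stored by older system tables. A small illustration with a hypothetical row dict:

    from cassandra.cqltypes import lookup_casstype

    # Hypothetical system-table row; the fully qualified class name is expected
    # to resolve the same way as the short 'UTF8Type' spelling.
    row = {'column_name': 'first_name',
           'validator': 'org.apache.cassandra.db.marshal.UTF8Type',
           'type': 'regular'}
    data_type = lookup_casstype(row['validator'])
    print(data_type.typename)  # expected: text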
Example No. 7
 def test_empty_wkb(self):
     for cls in (LineString, Polygon):
         class_name = cls.__name__
         cql_type = lookup_casstype(class_name + 'Type')
         self.assertEqual(
             str(cql_type.from_binary(cql_type.to_binary(cls(), 0), 0)),
             class_name.upper() + " EMPTY")
     self.assertEqual(
         str(PointType.from_binary(PointType.to_binary(Point(), 0), 0)),
         "POINT (nan nan)")
Example No. 8
 def test_marshalling(self):
     for serializedval, valtype, nativeval in marshalled_value_pairs:
         marshaller = lookup_casstype(valtype)
         whatwegot = marshaller.to_binary(nativeval, 1)
         self.assertEqual(whatwegot, serializedval,
                          msg='Marshaller for %s (%s) failed: marshal(%r) got %r instead of %r'
                              % (valtype, marshaller, nativeval, whatwegot, serializedval))
         self.assertEqual(type(whatwegot), type(serializedval),
                          msg='Marshaller for %s (%s) gave wrong type (%s instead of %s)'
                              % (valtype, marshaller, type(whatwegot), type(serializedval)))
Example No. 9
 def test_unmarshalling(self):
     for serializedval, valtype, nativeval in marshalled_value_pairs:
         unmarshaller = lookup_casstype(valtype)
         whatwegot = unmarshaller.from_binary(serializedval)
         self.assertEqual(whatwegot, nativeval,
                          msg='Unmarshaller for %s (%s) failed: unmarshal(%r) got %r instead of %r'
                              % (valtype, unmarshaller, serializedval, whatwegot, nativeval))
         self.assertEqual(type(whatwegot), type(nativeval),
                          msg='Unmarshaller for %s (%s) gave wrong type (%s instead of %s)'
                              % (valtype, unmarshaller, type(whatwegot), type(nativeval)))
Example No. 10
 def _build_column_metadata(self, table_metadata, row):
     name = row["column_name"]
     data_type = types.lookup_casstype(row["validator"])
     is_static = row.get("type", None) == "static"
     column_meta = ColumnMetadata(table_metadata,
                                  name,
                                  data_type,
                                  is_static=is_static)
     index_meta = self._build_index_metadata(column_meta, row)
     column_meta.index = index_meta
     return column_meta
Example No. 11
    def test_normalized_lookup(self):
        key_type = lookup_casstype('MapType(UTF8Type, Int32Type)')
        protocol_version = 3
        om = OrderedMapSerializedKey(key_type, protocol_version)
        key_ascii = {'one': 1}
        key_unicode = {u'two': 2}
        om._insert_unchecked(key_ascii, key_type.serialize(key_ascii, protocol_version), object())
        om._insert_unchecked(key_unicode, key_type.serialize(key_unicode, protocol_version), object())

        # type lookup is normalized by key_type
        # PYTHON-231
        self.assertIs(om[{'one': 1}], om[{u'one': 1}])
        self.assertIs(om[{'two': 2}], om[{u'two': 2}])
        self.assertIsNot(om[{'one': 1}], om[{'two': 2}])
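
The normalization mentioned in the comment comes from serializing each key with key_type before it is used for lookup, so spellings that produce identical bytes collapse to one entry. A short sketch of that property, reusing the map type from the test:

    from cassandra.cqltypes import lookup_casstype

    key_type = lookup_casstype('MapType(UTF8Type, Int32Type)')
    protocol_version = 3
    # Both spellings of the key should serialize to the same bytes, which is
    # why om[{'one': 1}] and om[{u'one': 1}] return the same entry above.
    assert (key_type.serialize({'one': 1}, protocol_version) ==
            key_type.serialize({u'one': 1}, protocol_version))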
Example No. 12
 def test_marshalling(self):
     for serializedval, valtype, nativeval in marshalled_value_pairs:
         marshaller = lookup_casstype(valtype)
         whatwegot = marshaller.to_binary(nativeval)
         self.assertEqual(
             whatwegot,
             serializedval,
             msg="Marshaller for %s (%s) failed: marshal(%r) got %r instead of %r"
             % (valtype, marshaller, nativeval, whatwegot, serializedval),
         )
         self.assertEqual(
             type(whatwegot),
             type(serializedval),
             msg="Marshaller for %s (%s) gave wrong type (%s instead of %s)"
             % (valtype, marshaller, type(whatwegot), type(serializedval)),
         )
Example No. 13
    def test_normalized_lookup(self):
        key_type = lookup_casstype('MapType(UTF8Type, Int32Type)')
        protocol_version = 3
        om = OrderedMapSerializedKey(key_type, protocol_version)
        key_ascii = {'one': 1}
        key_unicode = {u'two': 2}
        om._insert_unchecked(key_ascii,
                             key_type.serialize(key_ascii, protocol_version),
                             object())
        om._insert_unchecked(key_unicode,
                             key_type.serialize(key_unicode, protocol_version),
                             object())

        # type lookup is normalized by key_type
        # PYTHON-231
        self.assertIs(om[{'one': 1}], om[{u'one': 1}])
        self.assertIs(om[{'two': 2}], om[{u'two': 2}])
        self.assertIsNot(om[{'one': 1}], om[{'two': 2}])
Example No. 14
    def read_type(cls, f):
        optid = read_short(f)
        try:
            typeclass = cls.type_codes[optid]
        except KeyError:
            raise NotSupportedError("Unknown data type code 0x%04x. Have to skip"
                                    " entire result set." % (optid,))
        if typeclass in (ListType, SetType):
            subtype = cls.read_type(f)
            typeclass = typeclass.apply_parameters((subtype,))
        elif typeclass == MapType:
            keysubtype = cls.read_type(f)
            valsubtype = cls.read_type(f)
            typeclass = typeclass.apply_parameters((keysubtype, valsubtype))
        elif typeclass == CUSTOM_TYPE:
            classname = read_string(f)
            typeclass = lookup_casstype(classname)

        return typeclass
Example No. 15
 def test_marshal_platform(self):
     for proto_ver in protocol_versions:
         for geo in self.samples:
             cql_type = lookup_casstype(geo.__class__.__name__ + 'Type')
             self.assertEqual(cql_type.from_binary(cql_type.to_binary(geo, proto_ver), proto_ver), geo)
Example No. 16
 def test_empty_wkb(self):
     for cls in (LineString, Polygon):
         class_name = cls.__name__
         cql_type = lookup_casstype(class_name + 'Type')
         self.assertEqual(str(cql_type.from_binary(cql_type.to_binary(cls(), 0), 0)), class_name.upper() + " EMPTY")
     self.assertEqual(str(PointType.from_binary(PointType.to_binary(Point(), 0), 0)), "POINT (nan nan)")
Example No. 17
    def _build_table_metadata(self, keyspace_metadata, row, col_rows, trigger_rows):
        cfname = row["columnfamily_name"]
        cf_col_rows = col_rows.get(cfname, [])

        if not cf_col_rows:  # CASSANDRA-8487
            log.warning("Building table metadata with no column meta for %s.%s",
                        keyspace_metadata.name, cfname)

        comparator = types.lookup_casstype(row["comparator"])

        if issubclass(comparator, types.CompositeType):
            column_name_types = comparator.subtypes
            is_composite_comparator = True
        else:
            column_name_types = (comparator,)
            is_composite_comparator = False

        num_column_name_components = len(column_name_types)
        last_col = column_name_types[-1]

        column_aliases = row.get("column_aliases", None)

        clustering_rows = [r for r in cf_col_rows
                           if r.get('type', None) == "clustering_key"]
        if len(clustering_rows) > 1:
            clustering_rows = sorted(clustering_rows, key=lambda row: row.get('component_index'))

        if column_aliases is not None:
            column_aliases = json.loads(column_aliases)
        else:
            column_aliases = [r.get('column_name') for r in clustering_rows]

        if is_composite_comparator:
            if issubclass(last_col, types.ColumnToCollectionType):
                # collections
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 2
            elif (len(column_aliases) == num_column_name_components - 1
                    and issubclass(last_col, types.UTF8Type)):
                # aliases?
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 1
            else:
                # compact table
                is_compact = True
                has_value = column_aliases or not cf_col_rows
                clustering_size = num_column_name_components

                # Some thrift tables define names in composite types (see PYTHON-192)
                if not column_aliases and hasattr(comparator, 'fieldnames'):
                    column_aliases = comparator.fieldnames
        else:
            is_compact = True
            if column_aliases or not cf_col_rows:
                has_value = True
                clustering_size = num_column_name_components
            else:
                has_value = False
                clustering_size = 0

        table_meta = TableMetadata(keyspace_metadata, cfname)
        table_meta.comparator = comparator

        # partition key
        partition_rows = [r for r in cf_col_rows
                          if r.get('type', None) == "partition_key"]

        if len(partition_rows) > 1:
            partition_rows = sorted(partition_rows, key=lambda row: row.get('component_index'))

        key_aliases = row.get("key_aliases")
        if key_aliases is not None:
            key_aliases = json.loads(key_aliases) if key_aliases else []
        else:
            # In 2.0+, we can use the 'type' column. In 3.0+, we have to use it.
            key_aliases = [r.get('column_name') for r in partition_rows]

        key_validator = row.get("key_validator")
        if key_validator is not None:
            key_type = types.lookup_casstype(key_validator)
            key_types = key_type.subtypes if issubclass(key_type, types.CompositeType) else [key_type]
        else:
            key_types = [types.lookup_casstype(r.get('validator')) for r in partition_rows]

        for i, col_type in enumerate(key_types):
            if len(key_aliases) > i:
                column_name = key_aliases[i]
            elif i == 0:
                column_name = "key"
            else:
                column_name = "key%d" % i

            col = ColumnMetadata(table_meta, column_name, col_type)
            table_meta.columns[column_name] = col
            table_meta.partition_key.append(col)

        # clustering key
        for i in range(clustering_size):
            if len(column_aliases) > i:
                column_name = column_aliases[i]
            else:
                column_name = "column%d" % i

            col = ColumnMetadata(table_meta, column_name, column_name_types[i])
            table_meta.columns[column_name] = col
            table_meta.clustering_key.append(col)

        # value alias (if present)
        if has_value:
            value_alias_rows = [r for r in cf_col_rows
                                if r.get('type', None) == "compact_value"]

            if not key_aliases:  # TODO are we checking the right thing here?
                value_alias = "value"
            else:
                value_alias = row.get("value_alias", None)
                if value_alias is None and value_alias_rows:  # CASSANDRA-8487
                    # In 2.0+, we can use the 'type' column. In 3.0+, we have to use it.
                    value_alias = value_alias_rows[0].get('column_name')

            default_validator = row.get("default_validator")
            if default_validator:
                validator = types.lookup_casstype(default_validator)
            else:
                if value_alias_rows:  # CASSANDRA-8487
                    validator = types.lookup_casstype(value_alias_rows[0].get('validator'))

            col = ColumnMetadata(table_meta, value_alias, validator)
            if value_alias:  # CASSANDRA-8487
                table_meta.columns[value_alias] = col

        # other normal columns
        for col_row in cf_col_rows:
            column_meta = self._build_column_metadata(table_meta, col_row)
            table_meta.columns[column_meta.name] = column_meta

        if trigger_rows:
            for trigger_row in trigger_rows[cfname]:
                trigger_meta = self._build_trigger_metadata(table_meta, trigger_row)
                table_meta.triggers[trigger_meta.name] = trigger_meta

        table_meta.options = self._build_table_options(row)
        table_meta.is_compact_storage = is_compact

        return table_meta
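
The composite-comparator branch depends on the subtypes attribute that lookup_casstype attaches when it parses a parameterized type string. A minimal sketch of that piece, assuming only cassandra.cqltypes:

    from cassandra import cqltypes as types

    comparator = types.lookup_casstype('CompositeType(UTF8Type, Int32Type)')
    assert issubclass(comparator, types.CompositeType)
    # The column-name components, in declaration order.
    print(comparator.subtypes)  # UTF8Type and Int32Type as type classes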
Example No. 18
    def test_lookup_casstype(self):
        """
        Ensure lookup_casstype returns the correct classes
        """

        self.assertEqual(lookup_casstype('AsciiType'), cassandra.cqltypes.AsciiType)
        self.assertEqual(lookup_casstype('LongType'), cassandra.cqltypes.LongType)
        self.assertEqual(lookup_casstype('BytesType'), cassandra.cqltypes.BytesType)
        self.assertEqual(lookup_casstype('BooleanType'), cassandra.cqltypes.BooleanType)
        self.assertEqual(lookup_casstype('CounterColumnType'), cassandra.cqltypes.CounterColumnType)
        self.assertEqual(lookup_casstype('DateType'), cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('DecimalType'), cassandra.cqltypes.DecimalType)
        self.assertEqual(lookup_casstype('DoubleType'), cassandra.cqltypes.DoubleType)
        self.assertEqual(lookup_casstype('FloatType'), cassandra.cqltypes.FloatType)
        self.assertEqual(lookup_casstype('InetAddressType'), cassandra.cqltypes.InetAddressType)
        self.assertEqual(lookup_casstype('Int32Type'), cassandra.cqltypes.Int32Type)
        self.assertEqual(lookup_casstype('UTF8Type'), cassandra.cqltypes.UTF8Type)
        self.assertEqual(lookup_casstype('DateType'), cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('TimeType'), cassandra.cqltypes.TimeType)
        self.assertEqual(lookup_casstype('TimeUUIDType'), cassandra.cqltypes.TimeUUIDType)
        self.assertEqual(lookup_casstype('UUIDType'), cassandra.cqltypes.UUIDType)
        self.assertEqual(lookup_casstype('IntegerType'), cassandra.cqltypes.IntegerType)
        self.assertEqual(lookup_casstype('MapType'), cassandra.cqltypes.MapType)
        self.assertEqual(lookup_casstype('ListType'), cassandra.cqltypes.ListType)
        self.assertEqual(lookup_casstype('SetType'), cassandra.cqltypes.SetType)
        self.assertEqual(lookup_casstype('CompositeType'), cassandra.cqltypes.CompositeType)
        self.assertEqual(lookup_casstype('ColumnToCollectionType'), cassandra.cqltypes.ColumnToCollectionType)
        self.assertEqual(lookup_casstype('ReversedType'), cassandra.cqltypes.ReversedType)

        self.assertEqual(str(lookup_casstype('unknown')), str(cassandra.cqltypes.mkUnrecognizedType('unknown')))

        self.assertRaises(ValueError, lookup_casstype, 'AsciiType~')

        # TODO: Do a few more tests
        # "I would say some parameterized and nested types would be good to test,
        # like "MapType(AsciiType, IntegerType)" and "ReversedType(AsciiType)"
        self.assertEqual(str(lookup_casstype(BooleanType(True))), str(BooleanType(True)))
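
A sketch of the parameterized and nested lookups the TODO above asks for; the equality checks assume lookup_casstype returns the registered component classes from cassandra.cqltypes:

    import cassandra.cqltypes
    from cassandra.cqltypes import lookup_casstype

    map_type = lookup_casstype('MapType(AsciiType, IntegerType)')
    assert issubclass(map_type, cassandra.cqltypes.MapType)
    assert list(map_type.subtypes) == [cassandra.cqltypes.AsciiType,
                                       cassandra.cqltypes.IntegerType]

    reversed_type = lookup_casstype('ReversedType(AsciiType)')
    assert list(reversed_type.subtypes) == [cassandra.cqltypes.AsciiType]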
Example No. 19
    def _build_table_metadata(self, keyspace_metadata, row, col_rows,
                              trigger_rows):
        cfname = row["columnfamily_name"]
        cf_col_rows = col_rows.get(cfname, [])

        if not cf_col_rows:  # CASSANDRA-8487
            log.warning(
                "Building table metadata with no column meta for %s.%s",
                keyspace_metadata.name, cfname)

        comparator = types.lookup_casstype(row["comparator"])

        if issubclass(comparator, types.CompositeType):
            column_name_types = comparator.subtypes
            is_composite_comparator = True
        else:
            column_name_types = (comparator, )
            is_composite_comparator = False

        num_column_name_components = len(column_name_types)
        last_col = column_name_types[-1]

        column_aliases = row.get("column_aliases", None)

        clustering_rows = [
            r for r in cf_col_rows if r.get('type', None) == "clustering_key"
        ]
        if len(clustering_rows) > 1:
            clustering_rows = sorted(
                clustering_rows, key=lambda row: row.get('component_index'))

        if column_aliases is not None:
            column_aliases = json.loads(column_aliases)
        else:
            column_aliases = [r.get('column_name') for r in clustering_rows]

        if is_composite_comparator:
            if issubclass(last_col, types.ColumnToCollectionType):
                # collections
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 2
            elif (len(column_aliases) == num_column_name_components - 1
                  and issubclass(last_col, types.UTF8Type)):
                # aliases?
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 1
            else:
                # compact table
                is_compact = True
                has_value = column_aliases or not cf_col_rows
                clustering_size = num_column_name_components

                # Some thrift tables define names in composite types (see PYTHON-192)
                if not column_aliases and hasattr(comparator, 'fieldnames'):
                    column_aliases = comparator.fieldnames
        else:
            is_compact = True
            if column_aliases or not cf_col_rows:
                has_value = True
                clustering_size = num_column_name_components
            else:
                has_value = False
                clustering_size = 0

        table_meta = TableMetadata(keyspace_metadata, cfname)
        table_meta.comparator = comparator

        # partition key
        partition_rows = [
            r for r in cf_col_rows if r.get('type', None) == "partition_key"
        ]

        if len(partition_rows) > 1:
            partition_rows = sorted(partition_rows,
                                    key=lambda row: row.get('component_index'))

        key_aliases = row.get("key_aliases")
        if key_aliases is not None:
            key_aliases = json.loads(key_aliases) if key_aliases else []
        else:
            # In 2.0+, we can use the 'type' column. In 3.0+, we have to use it.
            key_aliases = [r.get('column_name') for r in partition_rows]

        key_validator = row.get("key_validator")
        if key_validator is not None:
            key_type = types.lookup_casstype(key_validator)
            key_types = key_type.subtypes if issubclass(
                key_type, types.CompositeType) else [key_type]
        else:
            key_types = [
                types.lookup_casstype(r.get('validator'))
                for r in partition_rows
            ]

        for i, col_type in enumerate(key_types):
            if len(key_aliases) > i:
                column_name = key_aliases[i]
            elif i == 0:
                column_name = "key"
            else:
                column_name = "key%d" % i

            col = ColumnMetadata(table_meta, column_name, col_type)
            table_meta.columns[column_name] = col
            table_meta.partition_key.append(col)

        # clustering key
        for i in range(clustering_size):
            if len(column_aliases) > i:
                column_name = column_aliases[i]
            else:
                column_name = "column%d" % i

            col = ColumnMetadata(table_meta, column_name, column_name_types[i])
            table_meta.columns[column_name] = col
            table_meta.clustering_key.append(col)

        # value alias (if present)
        if has_value:
            value_alias_rows = [
                r for r in cf_col_rows
                if r.get('type', None) == "compact_value"
            ]

            if not key_aliases:  # TODO are we checking the right thing here?
                value_alias = "value"
            else:
                value_alias = row.get("value_alias", None)
                if value_alias is None and value_alias_rows:  # CASSANDRA-8487
                    # In 2.0+, we can use the 'type' column. In 3.0+, we have to use it.
                    value_alias = value_alias_rows[0].get('column_name')

            default_validator = row.get("default_validator")
            if default_validator:
                validator = types.lookup_casstype(default_validator)
            else:
                if value_alias_rows:  # CASSANDRA-8487
                    validator = types.lookup_casstype(
                        value_alias_rows[0].get('validator'))

            col = ColumnMetadata(table_meta, value_alias, validator)
            if value_alias:  # CASSANDRA-8487
                table_meta.columns[value_alias] = col

        # other normal columns
        for col_row in cf_col_rows:
            column_meta = self._build_column_metadata(table_meta, col_row)
            table_meta.columns[column_meta.name] = column_meta

        if trigger_rows:
            for trigger_row in trigger_rows[cfname]:
                trigger_meta = self._build_trigger_metadata(
                    table_meta, trigger_row)
                table_meta.triggers[trigger_meta.name] = trigger_meta

        table_meta.options = self._build_table_options(row)
        table_meta.is_compact_storage = is_compact

        return table_meta
Example No. 20
    def test_lookup_casstype(self):
        """
        Ensure lookup_casstype returns the correct classes
        """

        self.assertEqual(lookup_casstype('AsciiType'), cassandra.cqltypes.AsciiType)
        self.assertEqual(lookup_casstype('LongType'), cassandra.cqltypes.LongType)
        self.assertEqual(lookup_casstype('BytesType'), cassandra.cqltypes.BytesType)
        self.assertEqual(lookup_casstype('BooleanType'), cassandra.cqltypes.BooleanType)
        self.assertEqual(lookup_casstype('CounterColumnType'), cassandra.cqltypes.CounterColumnType)
        self.assertEqual(lookup_casstype('DateType'), cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('DecimalType'), cassandra.cqltypes.DecimalType)
        self.assertEqual(lookup_casstype('DoubleType'), cassandra.cqltypes.DoubleType)
        self.assertEqual(lookup_casstype('FloatType'), cassandra.cqltypes.FloatType)
        self.assertEqual(lookup_casstype('InetAddressType'), cassandra.cqltypes.InetAddressType)
        self.assertEqual(lookup_casstype('Int32Type'), cassandra.cqltypes.Int32Type)
        self.assertEqual(lookup_casstype('UTF8Type'), cassandra.cqltypes.UTF8Type)
        self.assertEqual(lookup_casstype('DateType'), cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('TimeType'), cassandra.cqltypes.TimeType)
        self.assertEqual(lookup_casstype('ByteType'), cassandra.cqltypes.ByteType)
        self.assertEqual(lookup_casstype('ShortType'), cassandra.cqltypes.ShortType)
        self.assertEqual(lookup_casstype('TimeUUIDType'), cassandra.cqltypes.TimeUUIDType)
        self.assertEqual(lookup_casstype('UUIDType'), cassandra.cqltypes.UUIDType)
        self.assertEqual(lookup_casstype('IntegerType'), cassandra.cqltypes.IntegerType)
        self.assertEqual(lookup_casstype('MapType'), cassandra.cqltypes.MapType)
        self.assertEqual(lookup_casstype('ListType'), cassandra.cqltypes.ListType)
        self.assertEqual(lookup_casstype('SetType'), cassandra.cqltypes.SetType)
        self.assertEqual(lookup_casstype('CompositeType'), cassandra.cqltypes.CompositeType)
        self.assertEqual(lookup_casstype('ColumnToCollectionType'), cassandra.cqltypes.ColumnToCollectionType)
        self.assertEqual(lookup_casstype('ReversedType'), cassandra.cqltypes.ReversedType)

        self.assertEqual(str(lookup_casstype('unknown')), str(cassandra.cqltypes.mkUnrecognizedType('unknown')))

        self.assertRaises(ValueError, lookup_casstype, 'AsciiType~')
Example No. 21
    def _build_table_metadata(self, keyspace_metadata, row, col_rows):
        cfname = row["columnfamily_name"]

        comparator = types.lookup_casstype(row["comparator"])
        if issubclass(comparator, types.CompositeType):
            column_name_types = comparator.subtypes
            is_composite = True
        else:
            column_name_types = (comparator, )
            is_composite = False

        num_column_name_components = len(column_name_types)
        last_col = column_name_types[-1]

        column_aliases = json.loads(row["column_aliases"])
        if is_composite:
            if issubclass(last_col, types.ColumnToCollectionType):
                # collections
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 2
            elif (len(column_aliases) == num_column_name_components - 1
                  and issubclass(last_col, types.UTF8Type)):
                # aliases?
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 1
            else:
                # compact table
                is_compact = True
                has_value = True
                clustering_size = num_column_name_components
        else:
            is_compact = True
            if column_aliases or not col_rows.get(cfname):
                has_value = True
                clustering_size = num_column_name_components
            else:
                has_value = False
                clustering_size = 0

        table_meta = TableMetadata(keyspace_metadata, cfname)
        table_meta.comparator = comparator

        # partition key
        key_aliases = row.get("key_aliases")
        key_aliases = json.loads(key_aliases) if key_aliases else []

        key_type = types.lookup_casstype(row["key_validator"])
        key_types = key_type.subtypes if issubclass(
            key_type, types.CompositeType) else [key_type]
        for i, col_type in enumerate(key_types):
            if len(key_aliases) > i:
                column_name = key_aliases[i]
            elif i == 0:
                column_name = "key"
            else:
                column_name = "key%d" % i

            col = ColumnMetadata(table_meta, column_name, col_type)
            table_meta.columns[column_name] = col
            table_meta.partition_key.append(col)

        # clustering key
        for i in range(clustering_size):
            if len(column_aliases) > i:
                column_name = column_aliases[i]
            else:
                column_name = "column%d" % i

            col = ColumnMetadata(table_meta, column_name, column_name_types[i])
            table_meta.columns[column_name] = col
            table_meta.clustering_key.append(col)

        # value alias (if present)
        if has_value:
            validator = types.lookup_casstype(row["default_validator"])
            if not key_aliases:  # TODO are we checking the right thing here?
                value_alias = "value"
            else:
                value_alias = row["value_alias"]

            col = ColumnMetadata(table_meta, value_alias, validator)
            table_meta.columns[value_alias] = col

        # other normal columns
        if col_rows:
            for col_row in col_rows[cfname]:
                column_meta = self._build_column_metadata(table_meta, col_row)
                table_meta.columns[column_meta.name] = column_meta

        table_meta.options = self._build_table_options(row, is_compact)
        return table_meta
Example No. 22
 def get_fields_with_types(self, ksname, typename):
     return [(field[0], lookup_casstype(field[1]).cql_parameterized_type()) for field in
             self._meta.get(ksname, {}).get(typename, [])]
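
The method assumes self._meta maps a keyspace name to UDT names and then to (field_name, validator_string) pairs, and cql_parameterized_type() turns each validator into its CQL spelling. A hypothetical illustration of both pieces:

    from cassandra.cqltypes import lookup_casstype

    # Hypothetical _meta contents; only the nesting shape is assumed above.
    meta = {'my_ks': {'address': [('street', 'UTF8Type'),
                                  ('zip_codes', 'ListType(Int32Type)')]}}
    fields = [(name, lookup_casstype(validator).cql_parameterized_type())
              for name, validator in meta['my_ks']['address']]
    print(fields)  # expected: [('street', 'text'), ('zip_codes', 'list<int>')]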
Example No. 23
    def test_lookup_casstype(self):
        """
        Ensure lookup_casstype returns the correct classes
        """

        self.assertEqual(lookup_casstype('AsciiType'),
                         cassandra.cqltypes.AsciiType)
        self.assertEqual(lookup_casstype('LongType'),
                         cassandra.cqltypes.LongType)
        self.assertEqual(lookup_casstype('BytesType'),
                         cassandra.cqltypes.BytesType)
        self.assertEqual(lookup_casstype('BooleanType'),
                         cassandra.cqltypes.BooleanType)
        self.assertEqual(lookup_casstype('CounterColumnType'),
                         cassandra.cqltypes.CounterColumnType)
        self.assertEqual(lookup_casstype('DateType'),
                         cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('DecimalType'),
                         cassandra.cqltypes.DecimalType)
        self.assertEqual(lookup_casstype('DoubleType'),
                         cassandra.cqltypes.DoubleType)
        self.assertEqual(lookup_casstype('FloatType'),
                         cassandra.cqltypes.FloatType)
        self.assertEqual(lookup_casstype('InetAddressType'),
                         cassandra.cqltypes.InetAddressType)
        self.assertEqual(lookup_casstype('Int32Type'),
                         cassandra.cqltypes.Int32Type)
        self.assertEqual(lookup_casstype('UTF8Type'),
                         cassandra.cqltypes.UTF8Type)
        self.assertEqual(lookup_casstype('DateType'),
                         cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('TimeType'),
                         cassandra.cqltypes.TimeType)
        self.assertEqual(lookup_casstype('ByteType'),
                         cassandra.cqltypes.ByteType)
        self.assertEqual(lookup_casstype('ShortType'),
                         cassandra.cqltypes.ShortType)
        self.assertEqual(lookup_casstype('TimeUUIDType'),
                         cassandra.cqltypes.TimeUUIDType)
        self.assertEqual(lookup_casstype('UUIDType'),
                         cassandra.cqltypes.UUIDType)
        self.assertEqual(lookup_casstype('IntegerType'),
                         cassandra.cqltypes.IntegerType)
        self.assertEqual(lookup_casstype('MapType'),
                         cassandra.cqltypes.MapType)
        self.assertEqual(lookup_casstype('ListType'),
                         cassandra.cqltypes.ListType)
        self.assertEqual(lookup_casstype('SetType'),
                         cassandra.cqltypes.SetType)
        self.assertEqual(lookup_casstype('CompositeType'),
                         cassandra.cqltypes.CompositeType)
        self.assertEqual(lookup_casstype('ColumnToCollectionType'),
                         cassandra.cqltypes.ColumnToCollectionType)
        self.assertEqual(lookup_casstype('ReversedType'),
                         cassandra.cqltypes.ReversedType)
        self.assertEqual(lookup_casstype('DurationType'),
                         cassandra.cqltypes.DurationType)

        self.assertEqual(str(lookup_casstype('unknown')),
                         str(cassandra.cqltypes.mkUnrecognizedType('unknown')))

        self.assertRaises(ValueError, lookup_casstype, 'AsciiType~')
Example No. 24
    def test_lookup_casstype(self):
        """
        Ensure lookup_casstype returns the correct classes
        """

        self.assertEqual(lookup_casstype('AsciiType'),
                         cassandra.cqltypes.AsciiType)
        self.assertEqual(lookup_casstype('LongType'),
                         cassandra.cqltypes.LongType)
        self.assertEqual(lookup_casstype('BytesType'),
                         cassandra.cqltypes.BytesType)
        self.assertEqual(lookup_casstype('BooleanType'),
                         cassandra.cqltypes.BooleanType)
        self.assertEqual(lookup_casstype('CounterColumnType'),
                         cassandra.cqltypes.CounterColumnType)
        self.assertEqual(lookup_casstype('DecimalType'),
                         cassandra.cqltypes.DecimalType)
        self.assertEqual(lookup_casstype('DoubleType'),
                         cassandra.cqltypes.DoubleType)
        self.assertEqual(lookup_casstype('FloatType'),
                         cassandra.cqltypes.FloatType)
        self.assertEqual(lookup_casstype('InetAddressType'),
                         cassandra.cqltypes.InetAddressType)
        self.assertEqual(lookup_casstype('Int32Type'),
                         cassandra.cqltypes.Int32Type)
        self.assertEqual(lookup_casstype('UTF8Type'),
                         cassandra.cqltypes.UTF8Type)
        self.assertEqual(lookup_casstype('DateType'),
                         cassandra.cqltypes.DateType)
        self.assertEqual(lookup_casstype('TimeUUIDType'),
                         cassandra.cqltypes.TimeUUIDType)
        self.assertEqual(lookup_casstype('UUIDType'),
                         cassandra.cqltypes.UUIDType)
        self.assertEqual(lookup_casstype('IntegerType'),
                         cassandra.cqltypes.IntegerType)
        self.assertEqual(lookup_casstype('MapType'),
                         cassandra.cqltypes.MapType)
        self.assertEqual(lookup_casstype('ListType'),
                         cassandra.cqltypes.ListType)
        self.assertEqual(lookup_casstype('SetType'),
                         cassandra.cqltypes.SetType)
        self.assertEqual(lookup_casstype('CompositeType'),
                         cassandra.cqltypes.CompositeType)
        self.assertEqual(lookup_casstype('ColumnToCollectionType'),
                         cassandra.cqltypes.ColumnToCollectionType)
        self.assertEqual(lookup_casstype('ReversedType'),
                         cassandra.cqltypes.ReversedType)

        self.assertEqual(str(lookup_casstype('unknown')),
                         str(cassandra.cqltypes.mkUnrecognizedType('unknown')))

        self.assertRaises(ValueError, lookup_casstype, 'AsciiType~')

        # TODO: Do a few more tests
        # "I would say some parameterized and nested types would be good to test,
        # like "MapType(AsciiType, IntegerType)" and "ReversedType(AsciiType)"
        self.assertEqual(str(lookup_casstype(BooleanType(True))),
                         str(BooleanType(True)))
Example No. 25
 def __repr__(self):
     return "{0} {1} {2}".format(
         self.name,
         lookup_casstype(self.validator).typename,
         self.cql_type if self.cql_type != "regular" else "")
Example No. 26
    def _build_table_metadata(self, keyspace_metadata, row, col_rows):
        cfname = row["columnfamily_name"]

        comparator = types.lookup_casstype(row["comparator"])
        if issubclass(comparator, types.CompositeType):
            column_name_types = comparator.subtypes
            is_composite = True
        else:
            column_name_types = (comparator,)
            is_composite = False

        num_column_name_components = len(column_name_types)
        last_col = column_name_types[-1]

        column_aliases = json.loads(row["column_aliases"])
        if is_composite:
            if issubclass(last_col, types.ColumnToCollectionType):
                # collections
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 2
            elif (len(column_aliases) == num_column_name_components - 1
                    and issubclass(last_col, types.UTF8Type)):
                # aliases?
                is_compact = False
                has_value = False
                clustering_size = num_column_name_components - 1
            else:
                # compact table
                is_compact = True
                has_value = True
                clustering_size = num_column_name_components
        else:
            is_compact = True
            if column_aliases or not col_rows.get(cfname):
                has_value = True
                clustering_size = num_column_name_components
            else:
                has_value = False
                clustering_size = 0

        table_meta = TableMetadata(keyspace_metadata, cfname)
        table_meta.comparator = comparator

        # partition key
        key_aliases = row.get("key_aliases")
        key_aliases = json.loads(key_aliases) if key_aliases else []

        key_type = types.lookup_casstype(row["key_validator"])
        key_types = key_type.subtypes if issubclass(key_type, types.CompositeType) else [key_type]
        for i, col_type in enumerate(key_types):
            if len(key_aliases) > i:
                column_name = key_aliases[i]
            elif i == 0:
                column_name = "key"
            else:
                column_name = "key%d" % i

            col = ColumnMetadata(table_meta, column_name, col_type)
            table_meta.columns[column_name] = col
            table_meta.partition_key.append(col)

        # clustering key
        for i in range(clustering_size):
            if len(column_aliases) > i:
                column_name = column_aliases[i]
            else:
                column_name = "column%d" % i

            col = ColumnMetadata(table_meta, column_name, column_name_types[i])
            table_meta.columns[column_name] = col
            table_meta.clustering_key.append(col)

        # value alias (if present)
        if has_value:
            validator = types.lookup_casstype(row["default_validator"])
            if not key_aliases:  # TODO are we checking the right thing here?
                value_alias = "value"
            else:
                value_alias = row["value_alias"]

            col = ColumnMetadata(table_meta, value_alias, validator)
            table_meta.columns[value_alias] = col

        # other normal columns
        if col_rows:
            for col_row in col_rows[cfname]:
                column_meta = self._build_column_metadata(table_meta, col_row)
                table_meta.columns[column_meta.name] = column_meta

        table_meta.options = self._build_table_options(row)
        table_meta.is_compact_storage = is_compact
        return table_meta