Exemple #1
0
def test_sql(client):

    # Start from a clean slate: drop the table if a previous run left it.
    client.sql(drop_query)

    create_result = sql_fields(
        client, 'PUBLIC', create_query, page_size, include_field_names=True)
    assert create_result.status == 0, create_result.message

    # Populate the table row by row; primary keys are 1-based.
    for row_id, (fname, lname, grade) in enumerate(initial_data, start=1):
        insert_result = sql_fields(
            client, 'PUBLIC', insert_query, page_size,
            query_args=[row_id, fname, lname, grade],
            include_field_names=True)
        assert insert_result.status == 0, insert_result.message

    config_result = cache_get_configuration(client, 'SQL_PUBLIC_STUDENT')
    assert config_result.status == 0, config_result.message

    binary_type_name = (
        config_result.value[PROP_QUERY_ENTITIES][0]['value_type_name'])
    result = sql(
        client, 'SQL_PUBLIC_STUDENT', binary_type_name, 'TRUE', page_size)
    assert result.status == 0, result.message
    # The first page must be full and further pages must be pending.
    assert len(result.value['data']) == page_size
    assert result.value['more'] is True

    # Every returned value must unwrap to the table's binary type.
    for wrapped in result.value['data'].values():
        unwrapped = unwrap_binary(client, wrapped)
        assert unwrapped.type_id == entity_id(binary_type_name)

    cursor = result.value['cursor']

    # Drain the remaining pages through the cursor, checking each page.
    while result.value['more']:
        result = sql_cursor_get_page(client, cursor)
        assert result.status == 0, result.message

        for wrapped in result.value['data'].values():
            unwrapped = unwrap_binary(client, wrapped)
            assert unwrapped.type_id == entity_id(binary_type_name)

    # repeat cleanup
    result = sql_fields(client, 'PUBLIC', drop_query, page_size)
    assert result.status == 0
Exemple #2
0
def test_sql(client):

    # Remove any leftover table from a previous run.
    client.sql(drop_query)

    def assert_ok(res):
        # Fail fast with the server-supplied message on any error status.
        assert res.status == 0, res.message

    def assert_page_types(page, expected_type):
        # Every value on the page must unwrap to the expected binary type.
        for wrapped in page.value['data'].values():
            unwrapped = unwrap_binary(client, wrapped)
            assert unwrapped.type_id == entity_id(expected_type)

    assert_ok(sql_fields(
        client, 'PUBLIC', create_query, page_size, include_field_names=True))

    # Insert the fixture rows with 1-based primary keys.
    row_id = 0
    for fname, lname, grade in initial_data:
        row_id += 1
        assert_ok(sql_fields(
            client, 'PUBLIC', insert_query, page_size,
            query_args=[row_id, fname, lname, grade],
            include_field_names=True))

    config = cache_get_configuration(client, 'SQL_PUBLIC_STUDENT')
    assert_ok(config)

    binary_type_name = config.value[PROP_QUERY_ENTITIES][0]['value_type_name']
    result = sql(
        client, 'SQL_PUBLIC_STUDENT', binary_type_name, 'TRUE', page_size)
    assert_ok(result)
    # First page is full; more pages are pending behind the cursor.
    assert len(result.value['data']) == page_size
    assert result.value['more'] is True
    assert_page_types(result, binary_type_name)

    cursor = result.value['cursor']

    # Page through the rest of the result set.
    while result.value['more']:
        result = sql_cursor_get_page(client, cursor)
        assert_ok(result)
        assert_page_types(result, binary_type_name)

    # repeat cleanup
    result = sql_fields(client, 'PUBLIC', drop_query, page_size)
    assert result.status == 0
Exemple #3
0
 def create_stream(self, stream_name, delegate_type, delegate_name,
                   parameters, type_, flags):
     """Register a new stream record in the "streams" cache.

     :param stream_name: cache key under which the stream is stored,
     :param delegate_type: enum-like value; its ``.value`` goes on the wire,
     :param delegate_name: name of the delegate handling the stream,
     :param parameters: stream parameters (TODO: pass as BinaryObject),
     :param type_: optional item type used to derive the index type,
     :param flags: StreamFlags bitmask (query, ephemeral, ...).
     """
     streams_cache = self.ignite.get_cache("streams")

     # BUG FIX: the original used the logical `and` operator where a
     # bitwise flag test was intended — `flags and StreamFlags.query`
     # evaluates to `StreamFlags.query` whenever `flags` is truthy, so
     # the `!= 0` test was wrong for every non-zero flag combination.
     # (The commented-out expression below already shows `flags & ...`.)
     # bool() is used so the test works for both Flag and IntFlag.
     is_queryable = bool(flags & StreamFlags.query)
     is_ephemeral = bool(flags & StreamFlags.ephemeral)

     # Perper type utils is not implemented
     index_type = None
     if is_queryable and type_ is not None:
         index_type = PerperTypeUtils.get_java_type_name(type_) or type_.name

     stream_data = StreamData(
         agent=self.instance.agent,
         agentdelegate=self.fabric.agent_delegate,
         delegate=delegate_name,
         delegatetype=(entity_id("StreamDelegateType"),
                       delegate_type.value),
         # TODO: Parameters should be passed as BinaryObject
         parameters=ParameterData(parameters=(1, parameters)),
         listeners=(1, []),
         indextype=index_type,
         indexfields=(
             1,
             {},
         ),  # self.serializer.get_queriable_fields(type) if (flags & self.stream_flags.query) != 0 and type != None else None,
         ephemeral=is_ephemeral,
     )
     streams_cache.put(stream_name, stream_data)
     self._logger.debug(f"Created stream with name :{stream_name}")
    def type_id(self) -> int:
        """ Binary object type ID. """
        from pyignite.utils import entity_id

        # The fallback is derived from the type name; it is computed
        # eagerly, exactly as the eager default argument of getattr()
        # was in the original form.
        derived = entity_id(getattr(self, '_type_name', None))
        return getattr(self, '_type_id', derived)
Exemple #5
0
def __get_binary_type(conn, binary_type, query_id):
    # entity_id() accepts either a type name or a ready-made numeric ID.
    query_struct = Query(
        OP_GET_BINARY_TYPE,
        [('type_id', Int)],
        query_id=query_id,
        response_type=BinaryTypeResponse,
    )
    return query_perform(
        query_struct,
        conn,
        query_params={'type_id': entity_id(binary_type)},
    )
def test_sql(client, student_table_fixture, page_size):
    cache = client.get_cache('SQL_PUBLIC_STUDENT')
    settings = cache.settings

    binary_type_name = settings[PROP_QUERY_ENTITIES][0]['value_type_name']

    # Walk the whole table in ID order, four rows per server round-trip,
    # comparing each row against the fixture data.
    with cache.select_row('ORDER BY ID ASC', page_size=4) as cursor:
        for i, (key, value) in enumerate(cursor):
            assert key == i

            row = (value.FIRST_NAME, value.LAST_NAME, value.GRADE)
            assert row == student_table_data[i]
            assert value.type_id == entity_id(binary_type_name)
Exemple #7
0
def get_binary_type(
    connection: 'Connection',
    binary_type: Union[str, int],
    query_id=None,
) -> APIResult:
    """
    Gets the binary type information by type ID.

    :param connection: connection to Ignite server,
    :param binary_type: binary type name or ID,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """

    query_struct = Query(
        OP_GET_BINARY_TYPE,
        [
            ('type_id', Int),
        ],
        query_id=query_id,
    )

    # entity_id() normalizes either a type name or a numeric ID to an int.
    _, send_buffer = query_struct.from_python({
        'type_id':
        entity_id(binary_type),
    })
    connection.send(send_buffer)

    # The response layout is dynamic: only when the type exists does the
    # server append a body section (plus, for enums, an enum section)
    # followed by a schema section.
    response_head_struct = Response([
        ('type_exists', Bool),
    ])
    response_head_type, recv_buffer = response_head_struct.parse(connection)
    response_head = response_head_type.from_buffer_copy(recv_buffer)
    response_parts = []
    if response_head.type_exists:
        resp_body_type, resp_body_buffer = body_struct.parse(connection)
        response_parts.append(('body', resp_body_type))
        resp_body = resp_body_type.from_buffer_copy(resp_body_buffer)
        # recv_buffer accumulates the raw bytes in wire order so the final
        # combined ctypes class can be built from one contiguous buffer.
        recv_buffer += resp_body_buffer
        if resp_body.is_enum:
            # Enum types carry an extra section of (literal, ordinal) pairs.
            resp_enum, resp_enum_buffer = enum_struct.parse(connection)
            response_parts.append(('enums', resp_enum))
            recv_buffer += resp_enum_buffer
        resp_schema_type, resp_schema_buffer = schema_struct.parse(connection)
        response_parts.append(('schema', resp_schema_type))
        recv_buffer += resp_schema_buffer

    # Build a ctypes structure matching exactly the sections received,
    # then re-parse the accumulated buffer in one shot.
    response_class = type('GetBinaryTypeResponse', (response_head_type, ), {
        '_pack_': 1,
        '_fields_': response_parts,
    })
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    result.value = {'type_exists': response.type_exists}
    # The optional sections are surfaced only if they were present.
    if hasattr(response, 'body'):
        result.value.update(body_struct.to_python(response.body))
    if hasattr(response, 'enums'):
        result.value['enums'] = enum_struct.to_python(response.enums)
    if hasattr(response, 'schema'):
        # Map each schema ID to the list of its field IDs.
        result.value['schema'] = {
            x['schema_id']: [z['schema_field_id'] for z in x['schema_fields']]
            for x in schema_struct.to_python(response.schema)
        }
    return result
Exemple #8
0
def put_binary_type(
    connection: 'Connection',
    type_name: str,
    affinity_key_field: str = None,
    is_enum=False,
    schema: dict = None,
    query_id=None,
) -> APIResult:
    """
    Registers binary type information in cluster.

    :param connection: connection to Ignite server,
    :param type_name: name of the data type being registered,
    :param affinity_key_field: (optional) name of the affinity key field,
    :param is_enum: (optional) register enum if True, binary object otherwise.
     Defaults to False,
    :param schema: (optional) when register enum, pass a dict of enumerated
     parameter names as keys and an integers as values. When register binary
     type, pass a dict of field names: field types. Binary type with no fields
     is OK,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """
    # prepare data
    if schema is None:
        schema = {}
    type_id = entity_id(type_name)
    data = {
        'type_name': type_name,
        'type_id': type_id,
        'affinity_key_field': affinity_key_field,
        'binary_fields': [],
        'is_enum': is_enum,
        'schema': [],
    }

    schema_id = None
    if is_enum:
        # Enums carry (literal, ordinal) pairs instead of typed fields.
        data['enums'] = [
            {'literal': literal, 'type_id': ordinal}
            for literal, ordinal in schema.items()
        ]
    else:
        # Assemble the field list and fold each field ID into the FNV-1
        # schema hash in the same pass.
        schema_id = FNV1_OFFSET_BASIS if schema else 0
        for field_name, data_type in schema.items():
            # TODO: check for allowed data types
            field_id = entity_id(field_name)
            data['binary_fields'].append({
                'field_name': field_name,
                'type_id': int.from_bytes(
                    data_type.type_code, byteorder=PROTOCOL_BYTE_ORDER),
                'field_id': field_id,
            })
            # FNV-1 over the four little-endian bytes of the field ID:
            # xor in a byte, then multiply by the prime (with overflow).
            for shift in (0, 8, 16, 24):
                schema_id ^= (field_id >> shift) & 0xff
                schema_id = int_overflow(schema_id * FNV1_PRIME)

    data['schema'].append({
        'schema_id': schema_id,
        'schema_fields': [{'schema_field_id': entity_id(x)} for x in schema],
    })

    # do query; enums require an extra 'enums' section in the layout,
    # inserted between 'is_enum' and 'schema'
    request_fields = [
        ('type_id', Int),
        ('type_name', String),
        ('affinity_key_field', String),
        ('binary_fields', binary_fields_struct),
        ('is_enum', Bool),
    ]
    if is_enum:
        request_fields.append(('enums', enum_struct))
    request_fields.append(('schema', schema_struct))

    query_struct = Query(OP_PUT_BINARY_TYPE, request_fields, query_id=query_id)
    result = query_struct.perform(connection, query_params=data)
    if result.status == 0:
        result.value = {
            'type_id': type_id,
            'schema_id': schema_id,
        }
    return result
Exemple #9
0
    def from_python(cls, value: object):
        """
        Serialize *value* (a binary-object dataclass instance) into the
        Ignite binary wire format: header + field data + schema footer.
        """
        def find_client():
            """
            A nice hack. Extracts the nearest `Client` instance from the
            call stack.
            """
            from pyignite import Client

            frame = None
            try:
                # Walk outward through the callers' local variables until
                # a Client instance turns up; returns None if there is none.
                for rec in inspect.stack()[2:]:
                    frame = rec[0]
                    code = frame.f_code
                    for varname in code.co_varnames:
                        suspect = frame.f_locals[varname]
                        if isinstance(suspect, Client):
                            return suspect
            finally:
                # Break the frame reference cycle (see `inspect` docs).
                del frame

        compact_footer = True  # this is actually used
        client = find_client()
        if client:
            # Make the server aware of this type before sending values of
            # it, and adopt the client's footer-format preference.
            client.register_binary_type(value.__class__)
            compact_footer = client.compact_footer
        else:
            # NOTE(review): raising Warning (not warnings.warn) aborts
            # serialization entirely when no client is found — confirm
            # this hard failure is intended.
            raise Warning('Can not register binary type {}'.format(
                value.type_name))

        # prepare header
        header_class = cls.build_header()
        header = header_class()
        header.type_code = int.from_bytes(cls.type_code,
                                          byteorder=PROTOCOL_BYTE_ORDER)

        header.flags = cls.USER_TYPE | cls.HAS_SCHEMA
        if compact_footer:
            header.flags |= cls.COMPACT_FOOTER
        header.version = value.version
        header.type_id = value.type_id
        header.schema_id = value.schema_id

        # create fields and calculate offsets (relative to object start)
        field_buffer = b''
        offsets = [ctypes.sizeof(header_class)]
        schema_items = list(value.schema.items())
        for field_name, field_type in schema_items:
            # A missing attribute falls back to the field type's default.
            partial_buffer = field_type.from_python(
                getattr(value, field_name, getattr(field_type, 'default',
                                                   None)))
            offsets.append(max(offsets) + len(partial_buffer))
            field_buffer += partial_buffer

        # The last entry is the end offset, not a field start — drop it.
        offsets = offsets[:-1]

        # create footer: pick the narrowest offset width that fits
        if max(offsets, default=0) < 255:
            header.flags |= cls.OFFSET_ONE_BYTE
        elif max(offsets) < 65535:
            header.flags |= cls.OFFSET_TWO_BYTES
        schema_class = cls.schema_type(header.flags) * len(offsets)
        schema = schema_class()
        if compact_footer:
            # Compact footer: offsets only.
            for i, offset in enumerate(offsets):
                schema[i] = offset
        else:
            # Full footer: (field_id, offset) pairs.
            for i, offset in enumerate(offsets):
                schema[i].field_id = entity_id(schema_items[i][0])
                schema[i].offset = offset
        # calculate size and hash code (hash covers fields + footer)
        header.schema_offset = ctypes.sizeof(header_class) + len(field_buffer)
        header.length = header.schema_offset + ctypes.sizeof(schema_class)
        header.hash_code = hashcode(field_buffer + bytes(schema))

        return bytes(header) + field_buffer + bytes(schema)
Exemple #10
0
def __put_binary_type(connection, type_name, affinity_key_field, is_enum,
                      schema, query_id):
    # Registers binary type information in the cluster.
    # Normalize: no schema means an empty one.
    if schema is None:
        schema = {}

    type_id = entity_id(type_name)
    data = {
        'type_name': type_name,
        'type_id': type_id,
        'affinity_key_field': affinity_key_field,
        'binary_fields': [],
        'is_enum': is_enum,
        'schema': [],
    }

    s_id = None
    if is_enum:
        # Enums carry (literal, ordinal) pairs instead of typed fields.
        data['enums'] = [
            {'literal': literal, 'type_id': ordinal}
            for literal, ordinal in schema.items()
        ]
    else:
        # Schema hash is delegated to the schema_id() helper here.
        s_id = schema_id(schema)
        for field_name, data_type in schema.items():
            # TODO: check for allowed data types
            data['binary_fields'].append({
                'field_name': field_name,
                'type_id': int.from_bytes(
                    data_type.type_code, byteorder=PROTOCOL_BYTE_ORDER),
                'field_id': entity_id(field_name),
            })

    data['schema'].append({
        'schema_id': s_id,
        'schema_fields': [{'schema_field_id': entity_id(x)} for x in schema],
    })

    # Enums need an extra 'enums' section in the wire layout, inserted
    # between 'is_enum' and 'schema'.
    request_fields = [
        ('type_id', Int),
        ('type_name', String),
        ('affinity_key_field', String),
        ('binary_fields', binary_fields_struct),
        ('is_enum', Bool),
    ]
    if is_enum:
        request_fields.append(('enums', enum_struct))
    request_fields.append(('schema', schema_struct))

    query_struct = Query(OP_PUT_BINARY_TYPE, request_fields, query_id=query_id)
    return query_perform(
        query_struct,
        connection,
        query_params=data,
        post_process_fun=__post_process_put_binary(type_id),
    )
Exemple #11
0
def get_binary_type(
    connection: 'Connection', binary_type: Union[str, int], query_id=None,
) -> APIResult:
    """
    Gets the binary type information by type ID.

    :param connection: connection to Ignite server,
    :param binary_type: binary type name or ID,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """

    query_struct = Query(
        OP_GET_BINARY_TYPE,
        [
            ('type_id', Int),
        ],
        query_id=query_id,
    )

    # entity_id() normalizes either a type name or a numeric ID to an int.
    _, send_buffer = query_struct.from_python({
        'type_id': entity_id(binary_type),
    })
    connection.send(send_buffer)

    # The response layout is dynamic: only when the type exists does the
    # server append a body section (plus, for enums, an enum section)
    # followed by a schema section.
    response_head_struct = Response([
        ('type_exists', Bool),
    ])
    response_head_type, recv_buffer = response_head_struct.parse(connection)
    response_head = response_head_type.from_buffer_copy(recv_buffer)
    response_parts = []
    if response_head.type_exists:
        resp_body_type, resp_body_buffer = body_struct.parse(connection)
        response_parts.append(('body', resp_body_type))
        resp_body = resp_body_type.from_buffer_copy(resp_body_buffer)
        # recv_buffer accumulates the raw bytes in wire order so the final
        # combined ctypes class can be built from one contiguous buffer.
        recv_buffer += resp_body_buffer
        if resp_body.is_enum:
            # Enum types carry an extra section of (literal, ordinal) pairs.
            resp_enum, resp_enum_buffer = enum_struct.parse(connection)
            response_parts.append(('enums', resp_enum))
            recv_buffer += resp_enum_buffer
        resp_schema_type, resp_schema_buffer = schema_struct.parse(connection)
        response_parts.append(('schema', resp_schema_type))
        recv_buffer += resp_schema_buffer

    # Build a ctypes structure matching exactly the sections received,
    # then re-parse the accumulated buffer in one shot.
    response_class = type(
        'GetBinaryTypeResponse',
        (response_head_type,),
        {
            '_pack_': 1,
            '_fields_': response_parts,
        }
    )
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    result.value = {
        'type_exists': response.type_exists
    }
    # The optional sections are surfaced only if they were present.
    if hasattr(response, 'body'):
        result.value.update(body_struct.to_python(response.body))
    if hasattr(response, 'enums'):
        result.value['enums'] = enum_struct.to_python(response.enums)
    if hasattr(response, 'schema'):
        # Map each schema ID to the list of its field IDs.
        result.value['schema'] = {
            x['schema_id']: [
                z['schema_field_id'] for z in x['schema_fields']
            ]
            for x in schema_struct.to_python(response.schema)
        }
    return result
Exemple #12
0
def put_binary_type(
    connection: 'Connection', type_name: str, affinity_key_field: str=None,
    is_enum=False, schema: dict=None, query_id=None,
) -> APIResult:
    """
    Registers binary type information in cluster.

    :param connection: connection to Ignite server,
    :param type_name: name of the data type being registered,
    :param affinity_key_field: (optional) name of the affinity key field,
    :param is_enum: (optional) register enum if True, binary object otherwise.
     Defaults to False,
    :param schema: (optional) when register enum, pass a dict of enumerated
     parameter names as keys and an integers as values. When register binary
     type, pass a dict of field names: field types. Binary type with no fields
     is OK,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """
    # prepare data
    schema = {} if schema is None else schema
    type_id = entity_id(type_name)

    def fnv1_mix(acc, field_id):
        # Fold the four little-endian bytes of field_id into the FNV-1
        # hash: xor in a byte, then multiply by the prime (with overflow).
        for shift in (0, 8, 16, 24):
            acc ^= (field_id >> shift) & 0xff
            acc = int_overflow(acc * FNV1_PRIME)
        return acc

    data = {
        'type_name': type_name,
        'type_id': type_id,
        'affinity_key_field': affinity_key_field,
        'binary_fields': [],
        'is_enum': is_enum,
        'schema': [],
    }

    schema_hash = None
    if is_enum:
        # Enums carry (literal, ordinal) pairs instead of typed fields.
        data['enums'] = []
        for literal, ordinal in schema.items():
            data['enums'].append({
                'literal': literal,
                'type_id': ordinal,
            })
    else:
        # assemble schema and calculate schema ID in one go
        schema_hash = FNV1_OFFSET_BASIS if schema else 0
        for field_name, data_type in schema.items():
            # TODO: check for allowed data types
            fid = entity_id(field_name)
            data['binary_fields'].append({
                'field_name': field_name,
                'type_id': int.from_bytes(
                    data_type.type_code,
                    byteorder=PROTOCOL_BYTE_ORDER
                ),
                'field_id': fid,
            })
            schema_hash = fnv1_mix(schema_hash, fid)

    data['schema'].append({
        'schema_id': schema_hash,
        'schema_fields': [
            {'schema_field_id': entity_id(x)} for x in schema
        ],
    })

    # do query; enums require an extra 'enums' section in the request,
    # inserted between 'is_enum' and 'schema'
    request_fields = [
        ('type_id', Int),
        ('type_name', String),
        ('affinity_key_field', String),
        ('binary_fields', binary_fields_struct),
        ('is_enum', Bool),
    ]
    if is_enum:
        request_fields.append(('enums', enum_struct))
    request_fields.append(('schema', schema_struct))

    query_struct = Query(OP_PUT_BINARY_TYPE, request_fields, query_id=query_id)
    result = query_struct.perform(connection, query_params=data)
    if result.status == 0:
        result.value = {
            'type_id': type_id,
            'schema_id': schema_hash,
        }
    return result
Exemple #13
0
    def from_python(cls, value: object):
        """
        Serialize *value* (a binary-object dataclass instance) into the
        Ignite binary wire format: header + field data + schema footer.
        """

        def find_client():
            """
            A nice hack. Extracts the nearest `Client` instance from the
            call stack.
            """
            from pyignite import Client

            frame = None
            try:
                # Walk outward through the callers' local variables until
                # a Client instance turns up; returns None if there is none.
                for rec in inspect.stack()[2:]:
                    frame = rec[0]
                    code = frame.f_code
                    for varname in code.co_varnames:
                        suspect = frame.f_locals[varname]
                        if isinstance(suspect, Client):
                            return suspect
            finally:
                # Break the frame reference cycle (see `inspect` docs).
                del frame

        compact_footer = True  # this is actually used
        client = find_client()
        if client:
            # Make the server aware of this type before sending values of
            # it, and adopt the client's footer-format preference.
            client.register_binary_type(value.__class__)
            compact_footer = client.compact_footer
        else:
            # NOTE(review): raising Warning (not warnings.warn) aborts
            # serialization entirely when no client is found — confirm
            # this hard failure is intended.
            raise Warning(
                'Can not register binary type {}'.format(value.type_name)
            )

        # prepare header
        header_class = cls.build_header()
        header = header_class()
        header.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )

        header.flags = cls.USER_TYPE | cls.HAS_SCHEMA
        if compact_footer:
            header.flags |= cls.COMPACT_FOOTER
        header.version = value.version
        header.type_id = value.type_id
        header.schema_id = value.schema_id

        # create fields and calculate offsets (relative to object start)
        field_buffer = b''
        offsets = [ctypes.sizeof(header_class)]
        schema_items = list(value.schema.items())
        for field_name, field_type in schema_items:
            # A missing attribute falls back to the field type's default.
            partial_buffer = field_type.from_python(
                getattr(
                    value, field_name, getattr(field_type, 'default', None)
                )
            )
            offsets.append(max(offsets) + len(partial_buffer))
            field_buffer += partial_buffer

        # The last entry is the end offset, not a field start — drop it.
        offsets = offsets[:-1]

        # create footer: pick the narrowest offset width that fits
        if max(offsets, default=0) < 255:
            header.flags |= cls.OFFSET_ONE_BYTE
        elif max(offsets) < 65535:
            header.flags |= cls.OFFSET_TWO_BYTES
        schema_class = cls.schema_type(header.flags) * len(offsets)
        schema = schema_class()
        if compact_footer:
            # Compact footer: offsets only.
            for i, offset in enumerate(offsets):
                schema[i] = offset
        else:
            # Full footer: (field_id, offset) pairs.
            for i, offset in enumerate(offsets):
                schema[i].field_id = entity_id(schema_items[i][0])
                schema[i].offset = offset
        # calculate size and hash code (hash covers fields + footer)
        header.schema_offset = ctypes.sizeof(header_class) + len(field_buffer)
        header.length = header.schema_offset + ctypes.sizeof(schema_class)
        header.hash_code = hashcode(field_buffer + bytes(schema))

        return bytes(header) + field_buffer + bytes(schema)