def sql_fields_cursor_get_page(
    connection: 'Connection', cursor: int, field_count: int, query_id=None,
) -> APIResult:
    """
    Retrieves the next query result page by cursor ID from `sql_fields`.

    :param connection: connection to Ignite server,
    :param cursor: cursor ID,
    :param field_count: a number of fields in a row,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a value
     of type dict with results on success, non-zero status and an error
     description otherwise.

     Value dict is of following format:

     * `data`: list, result values,
     * `more`: bool, True if more data is available for subsequent
       ‘sql_fields_cursor_get_page’ calls.
    """
    query_struct = Query(
        OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE,
        [
            ('cursor', Long),
        ],
        query_id=query_id,
    )
    _, send_buffer = query_struct.from_python({
        'cursor': cursor,
    })
    connection.send(send_buffer)
    # each row is parsed as a struct of `field_count` anonymous fields
    response_struct = Response([
        ('data', StructArray([
            ('field_{}'.format(i), AnyDataObject) for i in range(field_count)
        ])),
        ('more', Bool),
    ])
    response_class, recv_buffer = response_struct.parse(connection)
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    value = response_struct.to_python(response)
    result.value = {
        'data': [],
        'more': value['more'],
    }
    # Reassemble each row by numeric field index. A lexicographic sort of
    # the keys (the previous approach) misorders columns once
    # field_count > 10, because 'field_10' sorts before 'field_2'.
    for row_dict in value['data']:
        row = [row_dict['field_{}'.format(i)] for i in range(field_count)]
        result.value['data'].append(row)
    return result
def sql_fields_cursor_get_page(
    connection: 'Connection', cursor: int, field_count: int, query_id=None,
) -> APIResult:
    """
    Retrieves the next query result page by cursor ID from `sql_fields`.

    :param connection: connection to Ignite server,
    :param cursor: cursor ID,
    :param field_count: a number of fields in a row,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a value
     of type dict with results on success, non-zero status and an error
     description otherwise.

     Value dict is of following format:

     * `data`: list, result values,
     * `more`: bool, True if more data is available for subsequent
       ‘sql_fields_cursor_get_page’ calls.
    """
    query_struct = Query(
        OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE,
        [
            ('cursor', Long),
        ],
        query_id=query_id,
    )
    _, send_buffer = query_struct.from_python({
        'cursor': cursor,
    })
    connection.send(send_buffer)
    # rows come back as structs with one synthetic 'field_<i>' key per column
    response_struct = Response([
        ('data', StructArray([
            ('field_{}'.format(i), AnyDataObject) for i in range(field_count)
        ])),
        ('more', Bool),
    ])
    response_class, recv_buffer = response_struct.parse(connection)
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    value = response_struct.to_python(response)
    result.value = {
        'data': [],
        'more': value['more'],
    }
    # BUGFIX: iterate field indices numerically instead of sorting the dict
    # keys as strings — string order breaks for 11+ columns, since
    # 'field_10' < 'field_2' lexicographically.
    for row_dict in value['data']:
        result.value['data'].append([
            row_dict['field_{}'.format(i)] for i in range(field_count)
        ])
    return result
def sql_fields(connection: 'Connection', cache: Union[str, int],
               query_str: str, page_size: int, query_args=None,
               schema: Union[str, None] = None,
               statement_type: int = StatementType.ANY,
               distributed_joins: bool = False, local: bool = False,
               replicated_only: bool = False,
               enforce_join_order: bool = False,
               collocated: bool = False, lazy: bool = False,
               include_field_names: bool = False, max_rows: int = -1,
               timeout: int = 0, binary: bool = False,
               query_id=None) -> APIResult:
    """
    Performs SQL fields query.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param query_str: SQL query string,
    :param page_size: cursor page size,
    :param query_args: (optional) query arguments. List of values or
     (value, type hint) tuples,
    :param schema: (optional) schema for the query. Defaults to `PUBLIC`,
    :param statement_type: (optional) statement type. Can be:

     * StatementType.ANY − any type (default),
     * StatementType.SELECT − select,
     * StatementType.UPDATE − update.

    :param distributed_joins: (optional) distributed joins. Defaults
     to False,
    :param local: (optional) pass True if this query should be executed
     on local node only. Defaults to False,
    :param replicated_only: (optional) whether query contains only
     replicated tables or not. Defaults to False,
    :param enforce_join_order: (optional) enforce join order. Defaults
     to False,
    :param collocated: (optional) whether your data is co-located or not.
     Defaults to False,
    :param lazy: (optional) lazy query execution. Defaults to False,
    :param include_field_names: (optional) include field names in result.
     Defaults to False,
    :param max_rows: (optional) query-wide maximum of rows. Defaults
     to -1 (all rows),
    :param timeout: (optional) non-negative timeout value in ms. Zero
     disables timeout (default),
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a value
     of type dict with results on success, non-zero status and an error
     description otherwise.

     Value dict is of following format:

     * `cursor`: int, cursor ID,
     * `data`: list, result values,
     * `more`: bool, True if more data is available for subsequent
       ‘sql_fields_cursor_get_page’ calls.
    """
    # avoid the mutable-default-argument pitfall
    if query_args is None:
        query_args = []
    # the field order below mirrors the OP_QUERY_SQL_FIELDS request layout
    query_struct = Query(
        OP_QUERY_SQL_FIELDS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('schema', String),
            ('page_size', Int),
            ('max_rows', Int),
            ('query_str', String),
            ('query_args', AnyDataArray()),
            ('statement_type', StatementType),
            ('distributed_joins', Bool),
            ('local', Bool),
            ('replicated_only', Bool),
            ('enforce_join_order', Bool),
            ('collocated', Bool),
            ('lazy', Bool),
            ('timeout', Long),
            ('include_field_names', Bool),
        ],
        query_id=query_id,
    )
    _, send_buffer = query_struct.from_python({
        'hash_code': cache_id(cache),   # cache given as name or ready ID
        'flag': 1 if binary else 0,     # 1 keeps values in binary form
        'schema': schema,
        'page_size': page_size,
        'max_rows': max_rows,
        'query_str': query_str,
        'query_args': query_args,
        'statement_type': statement_type,
        'distributed_joins': distributed_joins,
        'local': local,
        'replicated_only': replicated_only,
        'enforce_join_order': enforce_join_order,
        'collocated': collocated,
        'lazy': lazy,
        'timeout': timeout,
        'include_field_names': include_field_names,
    })
    connection.send(send_buffer)
    # first page and cursor ID come back in one response
    response_struct = SQLResponse(
        include_field_names=include_field_names,
        has_cursor=True,
    )
    response_class, recv_buffer = response_struct.parse(connection)
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    result.value = response_struct.to_python(response)
    return result
def get_binary_type(
    connection: 'Connection', binary_type: Union[str, int], query_id=None,
) -> APIResult:
    """
    Gets the binary type information by type ID.

    :param connection: connection to Ignite server,
    :param binary_type: binary type name or ID,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object.
    """
    query_struct = Query(
        OP_GET_BINARY_TYPE,
        [
            ('type_id', Int),
        ],
        query_id=query_id,
    )
    # `entity_id` resolves a type name to its ID, or passes an ID through
    _, send_buffer = query_struct.from_python({
        'type_id': entity_id(binary_type),
    })
    connection.send(send_buffer)
    # The response layout is dynamic: a fixed head, then — only when the
    # type exists — a body, an optional enum section, and a schema section.
    # Each section is parsed off the wire in turn, and its raw bytes are
    # appended to `recv_buffer` so the final class can re-read them all.
    response_head_struct = Response([
        ('type_exists', Bool),
    ])
    response_head_type, recv_buffer = response_head_struct.parse(connection)
    response_head = response_head_type.from_buffer_copy(recv_buffer)
    response_parts = []
    if response_head.type_exists:
        resp_body_type, resp_body_buffer = body_struct.parse(connection)
        response_parts.append(('body', resp_body_type))
        resp_body = resp_body_type.from_buffer_copy(resp_body_buffer)
        recv_buffer += resp_body_buffer
        if resp_body.is_enum:
            # enum types carry an extra section describing their values
            resp_enum, resp_enum_buffer = enum_struct.parse(connection)
            response_parts.append(('enums', resp_enum))
            recv_buffer += resp_enum_buffer
        resp_schema_type, resp_schema_buffer = schema_struct.parse(connection)
        response_parts.append(('schema', resp_schema_type))
        recv_buffer += resp_schema_buffer
    # assemble a structure type matching exactly the sections received
    response_class = type(
        'GetBinaryTypeResponse',
        (response_head_type, ),
        {
            '_pack_': 1,
            '_fields_': response_parts,
        }
    )
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    result.value = {'type_exists': response.type_exists}
    if hasattr(response, 'body'):
        result.value.update(body_struct.to_python(response.body))
    if hasattr(response, 'enums'):
        result.value['enums'] = enum_struct.to_python(response.enums)
    if hasattr(response, 'schema'):
        # map each schema ID to the list of its field IDs
        result.value['schema'] = {
            x['schema_id']: [
                z['schema_field_id'] for z in x['schema_fields']
            ]
            for x in schema_struct.to_python(response.schema)
        }
    return result
def sql_fields(
    connection: 'Connection', cache: Union[str, int], query_str: str,
    page_size: int, query_args=None, schema: str = None,
    statement_type: int = StatementType.ANY,
    distributed_joins: bool = False, local: bool = False,
    replicated_only: bool = False, enforce_join_order: bool = False,
    collocated: bool = False, lazy: bool = False,
    include_field_names: bool = False, max_rows: int = -1,
    timeout: int = 0, binary: bool = False, query_id=None
) -> APIResult:
    """
    Performs SQL fields query.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param query_str: SQL query string,
    :param page_size: cursor page size,
    :param query_args: (optional) query arguments. List of values or
     (value, type hint) tuples,
    :param schema: (optional) schema for the query. Defaults to `PUBLIC`,
    :param statement_type: (optional) statement type. Can be:

     * StatementType.ANY − any type (default),
     * StatementType.SELECT − select,
     * StatementType.UPDATE − update.

    :param distributed_joins: (optional) distributed joins. Defaults
     to False,
    :param local: (optional) pass True if this query should be executed
     on local node only. Defaults to False,
    :param replicated_only: (optional) whether query contains only
     replicated tables or not. Defaults to False,
    :param enforce_join_order: (optional) enforce join order. Defaults
     to False,
    :param collocated: (optional) whether your data is co-located or not.
     Defaults to False,
    :param lazy: (optional) lazy query execution. Defaults to False,
    :param include_field_names: (optional) include field names in result.
     Defaults to False,
    :param max_rows: (optional) query-wide maximum of rows. Defaults
     to -1 (all rows),
    :param timeout: (optional) non-negative timeout value in ms. Zero
     disables timeout (default),
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a value
     of type dict with results on success, non-zero status and an error
     description otherwise.

     Value dict is of following format:

     * `cursor`: int, cursor ID,
     * `data`: list, result values,
     * `more`: bool, True if more data is available for subsequent
       ‘sql_fields_cursor_get_page’ calls.
    """
    if query_args is None:
        query_args = []
    # request layout for OP_QUERY_SQL_FIELDS, in wire order
    request_fields = [
        ('hash_code', Int),
        ('flag', Byte),
        ('schema', String),
        ('page_size', Int),
        ('max_rows', Int),
        ('query_str', String),
        ('query_args', AnyDataArray()),
        ('statement_type', StatementType),
        ('distributed_joins', Bool),
        ('local', Bool),
        ('replicated_only', Bool),
        ('enforce_join_order', Bool),
        ('collocated', Bool),
        ('lazy', Bool),
        ('timeout', Long),
        ('include_field_names', Bool),
    ]
    request = Query(OP_QUERY_SQL_FIELDS, request_fields, query_id=query_id)
    request_values = {
        'hash_code': cache_id(cache),
        'flag': 1 if binary else 0,
        'schema': schema,
        'page_size': page_size,
        'max_rows': max_rows,
        'query_str': query_str,
        'query_args': query_args,
        'statement_type': statement_type,
        'distributed_joins': distributed_joins,
        'local': local,
        'replicated_only': replicated_only,
        'enforce_join_order': enforce_join_order,
        'collocated': collocated,
        'lazy': lazy,
        'timeout': timeout,
        'include_field_names': include_field_names,
    }
    _, request_data = request.from_python(request_values)
    connection.send(request_data)

    # the first page of results arrives together with the cursor ID
    sql_response = SQLResponse(
        include_field_names=include_field_names,
        has_cursor=True,
    )
    response_type, response_data = sql_response.parse(connection)
    parsed = response_type.from_buffer_copy(response_data)
    result = APIResult(parsed)
    if result.status == 0:
        result.value = sql_response.to_python(parsed)
    return result
def get_binary_type(
    connection: 'Connection', binary_type: Union[str, int], query_id=None,
) -> APIResult:
    """
    Gets the binary type information by type ID.

    :param connection: connection to Ignite server,
    :param binary_type: binary type name or ID,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object.
    """
    request = Query(
        OP_GET_BINARY_TYPE,
        [
            ('type_id', Int),
        ],
        query_id=query_id,
    )
    _, request_data = request.from_python({
        'type_id': entity_id(binary_type),
    })
    connection.send(request_data)

    # parse the fixed head first; the rest of the response is conditional
    head_struct = Response([
        ('type_exists', Bool),
    ])
    head_type, collected_buffer = head_struct.parse(connection)
    head = head_type.from_buffer_copy(collected_buffer)

    # collect (name, ctype) pairs for whichever sections actually follow,
    # accumulating their raw bytes so the final class re-reads everything
    extra_parts = []
    if head.type_exists:
        body_type, body_buffer = body_struct.parse(connection)
        extra_parts.append(('body', body_type))
        collected_buffer += body_buffer
        body = body_type.from_buffer_copy(body_buffer)
        if body.is_enum:
            # enum types ship an additional value-mapping section
            enum_type, enum_buffer = enum_struct.parse(connection)
            extra_parts.append(('enums', enum_type))
            collected_buffer += enum_buffer
        schema_type, schema_buffer = schema_struct.parse(connection)
        extra_parts.append(('schema', schema_type))
        collected_buffer += schema_buffer

    # build a structure type matching the exact set of sections received
    full_response_type = type('GetBinaryTypeResponse', (head_type, ), {
        '_pack_': 1,
        '_fields_': extra_parts,
    })
    full_response = full_response_type.from_buffer_copy(collected_buffer)
    result = APIResult(full_response)
    if result.status != 0:
        return result

    result.value = {'type_exists': full_response.type_exists}
    if hasattr(full_response, 'body'):
        result.value.update(body_struct.to_python(full_response.body))
    if hasattr(full_response, 'enums'):
        result.value['enums'] = enum_struct.to_python(full_response.enums)
    if hasattr(full_response, 'schema'):
        # schema ID -> list of field IDs
        schemas = {}
        for entry in schema_struct.to_python(full_response.schema):
            schemas[entry['schema_id']] = [
                field['schema_field_id'] for field in entry['schema_fields']
            ]
        result.value['schema'] = schemas
    return result