def bind(statement: monetdbe_statement, data: Any, parameter_nr: int, type_info=None) -> None:
    """Bind a Python value to one parameter of a prepared statement.

    Args:
        statement: the prepared statement handle
        data: the Python value to bind
        parameter_nr: zero-based index of the parameter to bind
        type_info: optional sequence of TypeInfo describing the statement's
            input parameters; when given, decimal parameters are converted to
            the matching integer implementation type before binding

    Raises:
        ProgrammingError: when parameter_nr is out of range for type_info
        NotImplementedError: for an unrecognized decimal implementation type
    """
    try:
        # BUGFIX: the original unconditionally indexed type_info, so the
        # documented default (type_info=None) crashed with a TypeError.
        if type_info is None:
            prepared = prepare_bind(data)
        else:
            _type_info = type_info[parameter_nr]
            if _type_info.sql_type == 'decimal':
                # Scale the decimal into the integer representation that the
                # underlying implementation type stores.
                d = int(Decimal(data) * (Decimal(10) ** _type_info.scale))
                if _type_info.impl_type == 'bte':
                    prepared = monetdbe_decimal_to_bte(d)
                elif _type_info.impl_type == 'sht':
                    prepared = monetdbe_decimal_to_sht(d)
                elif _type_info.impl_type == 'int':
                    prepared = monetdbe_decimal_to_int(d)
                elif _type_info.impl_type == 'lng':
                    prepared = monetdbe_decimal_to_lng(d)
                else:
                    raise NotImplementedError(
                        "Unknown decimal implementation type")
            else:
                prepared = prepare_bind(data)
    except IndexError as e:
        # local import to avoid a circular dependency at module load time
        from monetdbe import exceptions
        raise exceptions.ProgrammingError(
            f"no type information for parameter {parameter_nr}") from e
    check_error(lib.monetdbe_bind(statement, prepared, parameter_nr))
def prepare(self, query: str) -> monetdbe_statement:
    """Compile *query* into a prepared-statement handle.

    Args:
        query: the SQL text to prepare

    Returns:
        the native prepared-statement pointer
    """
    self._switch()
    statement_ptr = ffi.new("monetdbe_statement **")
    check_error(
        lib.monetdbe_prepare(self._monetdbe_database,
                             str(query).encode(), statement_ptr))
    return statement_ptr[0]
def query(self, query: str, make_result: bool = False) -> Tuple[Optional[Any], int]:
    """Execute a query.

    Args:
        query: the query
        make_result: Create and return a result object. If enabled, you
            need to call cleanup_result on the result afterwards.

    Returns:
        result, affected_rows
    """
    self._switch()
    result_ptr = ffi.new("monetdbe_result **") if make_result else ffi.NULL
    row_counter = ffi.new("monetdbe_cnt *")
    check_error(
        lib.monetdbe_query(self._monetdbe_database, query.encode(),
                           result_ptr, row_counter))
    result = result_ptr[0] if make_result else None
    return result, row_counter[0]
def append(self, table: str, data: Mapping[str, np.ndarray], schema: str = 'sys') -> None:
    """ Directly append an array structure

    Args:
        table: name of the target table
        data: mapping of column name -> numpy array of values to append
        schema: schema the table lives in (default 'sys')

    Raises:
        ProgrammingError: when the supplied column names don't match the
            table, or when a column's numpy dtype does not map to the
            existing column type.
    """
    n_columns = len(data)
    # Fetch the table's current layout so we can validate names and types.
    existing_columns = list(self.get_columns(schema=schema, table=table))
    existing_names, existing_types = zip(*existing_columns)
    if not set(existing_names) == set(data.keys()):
        error = f"Appended column names ({', '.join(str(i) for i in data.keys())}) " \
                f"don't match existing column names ({', '.join(existing_names)})"
        raise exceptions.ProgrammingError(error)
    work_columns = ffi.new(f'monetdbe_column * [{n_columns}]')
    # work_objs keeps the per-column cffi structs alive until the native
    # append call below has run; dropping them earlier would free the memory.
    work_objs = []
    for column_num, (column_name, existing_type) in enumerate(existing_columns):
        column_values = data[column_name]
        work_column = ffi.new('monetdbe_column *')
        work_type_string, work_type = numpy_monetdb_map(column_values.dtype)
        # No implicit conversion here: the numpy dtype must map exactly to
        # the existing column type.
        if not work_type == existing_type:
            existing_type_string = monet_numpy_map[existing_type][0]
            error = f"Type '{work_type_string}' for appended column '{column_name}' " \
                    f"does not match table type '{existing_type_string}'"
            raise exceptions.ProgrammingError(error)
        work_column.type = work_type
        work_column.count = column_values.shape[0]
        work_column.name = ffi.new('char[]', column_name.encode())
        # Zero-copy: point the native column at the numpy array's buffer.
        work_column.data = ffi.cast(f"{work_type_string} *",
                                    ffi.from_buffer(column_values))
        work_columns[column_num] = work_column
        work_objs.append(work_column)
    check_error(lib.monetdbe_append(self._connection, schema.encode(),
                                    table.encode(), work_columns, n_columns))
def execute(statement: monetdbe_statement, make_result: bool = False) -> Tuple[monetdbe_result, int]:
    """Run a prepared statement.

    Args:
        statement: the prepared statement to execute
        make_result: when True, create and return a result object
            (which the caller must clean up afterwards)

    Returns:
        result (or None when make_result is False), affected_rows
    """
    result_ptr = ffi.new("monetdbe_result **") if make_result else ffi.NULL
    row_counter = ffi.new("monetdbe_cnt *")
    check_error(lib.monetdbe_execute(statement, result_ptr, row_counter))
    result = result_ptr[0] if make_result else None
    return result, row_counter[0]
def prepare(self, query: str) -> Tuple[monetdbe_statement, list]:
    """Compile *query* into a prepared statement and collect input-parameter info.

    Args:
        query: the SQL text to prepare

    Returns:
        A tuple of (prepared statement handle, list of TypeInfo for the
        statement's input parameters).

    Note:
        BUGFIX: the original return annotation claimed ``monetdbe_statement``
        although a 2-tuple is returned; corrected here.
    """
    self._switch()
    stmt = ffi.new("monetdbe_statement **")
    p_result = ffi.new("monetdbe_result **")
    check_error(
        lib.monetdbe_prepare(self._monetdbe_database, str(query).encode(),
                             stmt, p_result))
    input_parameter_info = []
    for r in range(p_result[0].nrows):
        # Rows whose column 3 is NULL describe input parameters (as opposed
        # to result columns) -- presumably per the monetdbe prepare-result
        # layout; TODO confirm against the monetdbe C API docs.
        if (extract(result_fetch(p_result[0], 3), r)) is None:
            row = TypeInfo(impl_type=extract(result_fetch(p_result[0], 6), r),
                           sql_type=extract(result_fetch(p_result[0], 0), r),
                           scale=extract(result_fetch(p_result[0], 2), r))
            input_parameter_info.append(row)
    # NOTE(review): p_result is never passed to cleanup_result here, which
    # looks like a leak of the description result set -- verify intent.
    return stmt[0], input_parameter_info
def bind(statement: monetdbe_statement, data: Any, parameter_nr: int) -> None:
    """Bind *data* to parameter *parameter_nr* of a prepared statement.

    Args:
        statement: the prepared statement handle
        data: the Python value to bind
        parameter_nr: zero-based parameter index
    """
    converted = prepare_bind(data)
    check_error(lib.monetdbe_bind(statement, converted, parameter_nr))
def get_autocommit() -> bool:
    """Return whether autocommit is currently enabled."""
    flag = ffi.new("int *")
    check_error(lib.monetdbe_get_autocommit(flag))
    return flag[0] != 0
def set_autocommit(self, value: bool) -> None:
    """Enable or disable autocommit on this database handle."""
    self._switch()
    # the native API expects an int flag, not a Python bool
    flag = int(value)
    check_error(lib.monetdbe_set_autocommit(self._monetdbe_database, flag))
def result_fetch(result: monetdbe_result, column: int) -> monetdbe_column:
    """Fetch one column descriptor from a result set.

    Args:
        result: the native result set
        column: zero-based column index

    Returns:
        the native column pointer
    """
    column_ptr = ffi.new("monetdbe_column **")
    check_error(lib.monetdbe_result_fetch(result, column_ptr, column))
    return column_ptr[0]
def cleanup_result(self, result: monetdbe_result):
    """Release a native result set, if both it and the database are live."""
    _logger.info("cleanup_result called")
    # no-op when there is nothing to free or the database is already gone
    if not result or not self._monetdbe_database:
        return
    check_error(
        lib.monetdbe_cleanup_result(self._monetdbe_database, result))
def append(self, table: str, data: Mapping[str, np.ndarray], schema: str = 'sys') -> None:
    """ Directly append an array structure

    Args:
        table: name of the target table
        data: mapping of column name -> numpy array of values to append
        schema: schema the table lives in (default 'sys')

    Raises:
        ProgrammingError: when the supplied column names don't match the table
        ValueError: when a column's values can't be converted to the table's
            column type
    """
    self._switch()
    n_columns = len(data)
    existing_columns = list(self.get_columns(schema=schema, table=table))
    existing_names, existing_types = zip(*existing_columns)
    if not set(existing_names) == set(data.keys()):
        error = f"Appended column names ({', '.join(str(i) for i in data.keys())}) " \
                f"don't match existing column names ({', '.join(existing_names)})"
        raise exceptions.ProgrammingError(error)
    work_columns = ffi.new(f'monetdbe_column * [{n_columns}]')
    work_objs = []
    # cffi_objects assists to keep all in-memory native data structure alive
    # during the execution of this call
    cffi_objects = list()
    for column_num, (column_name, existing_type) in enumerate(existing_columns):
        column_values = data[column_name]
        work_column = ffi.new('monetdbe_column *')
        type_info = numpy_monetdb_map(column_values.dtype)
        # try to convert the values if types don't match
        if type_info.c_type != existing_type:
            if type_info.c_type == lib.monetdbe_timestamp and existing_type == lib.monetdbe_date and np.issubdtype(
                    column_values.dtype, np.datetime64):
                # We are going to cast to a monetdbe_date and consider
                # monetdbe_timestamp as a 'base type' to signal this.
                type_info = timestamp_to_date()
            else:
                precision_warning(type_info.c_type, existing_type)
                to_numpy_type = monet_c_type_map[existing_type].numpy_type
                try:
                    column_values = column_values.astype(to_numpy_type)
                    type_info = numpy_monetdb_map(column_values.dtype)
                except Exception as e:
                    existing_type_string = monet_c_type_map[
                        existing_type].c_string_type
                    error = f"Can't convert '{type_info.c_string_type}' " \
                            f"to type '{existing_type_string}' for column '{column_name}': {e} "
                    # BUGFIX: chain the original exception so the real cause
                    # (the astype failure) is preserved in the traceback.
                    raise ValueError(error) from e
        work_column.type = type_info.c_type
        work_column.count = column_values.shape[0]
        work_column.name = ffi.new('char[]', column_name.encode())
        if type_info.numpy_type.kind == 'M':
            # datetime64 column: let native code convert to timestamp structs
            t = ffi.new('monetdbe_data_timestamp[]', work_column.count)
            cffi_objects.append(t)
            unit = np.datetime_data(column_values.dtype)[0].encode()
            p = ffi.from_buffer("int64_t*", column_values)
            lib.initialize_timestamp_array_from_numpy(
                self._monetdbe_database, t, work_column.count, p, unit,
                existing_type)
            work_column.data = t
        elif type_info.numpy_type.kind == 'U':
            # first massage the numpy array of unicode into a matrix of null
            # terminated rows of bytes.
            m = ffi.from_buffer(
                "bool*", column_values.mask) if np.ma.isMaskedArray(
                    column_values) else 0  # type: ignore[attr-defined]
            cffi_objects.append(m)
            v = np.char.encode(column_values).view('b').reshape(
                (work_column.count, -1))
            # append a NUL column so every row is null-terminated
            v = np.c_[v, np.zeros(work_column.count, dtype=np.int8)]
            stride_length = v.shape[1]
            cffi_objects.append(v)
            t = ffi.new('char*[]', work_column.count)
            cffi_objects.append(t)
            p = ffi.from_buffer("char*", v)
            cffi_objects.append(p)
            lib.initialize_string_array_from_numpy(t, work_column.count, p,
                                                   stride_length,
                                                   ffi.cast("bool*", m))
            work_column.data = t
        else:
            # plain numeric column: zero-copy view on the numpy buffer
            p = ffi.from_buffer(f"{type_info.c_string_type}*", column_values)
            cffi_objects.append(p)
            work_column.data = p
        work_columns[column_num] = work_column
        work_objs.append(work_column)
    check_error(
        lib.monetdbe_append(self._monetdbe_database, schema.encode(),
                            table.encode(), work_columns, n_columns))
def cleanup_result(self, result: ffi.CData):
    """Release a native result set, if both it and the connection are live."""
    _logger.info("cleanup_result called")
    # skip freeing when there is no result or the connection is closed
    if not result or not self._connection:
        return
    check_error(lib.monetdbe_cleanup_result(self._connection, result))
def set_autocommit(self, value: bool) -> None:
    """Enable or disable autocommit on the underlying connection."""
    # the native API takes an int flag rather than a Python bool
    flag = 1 if value else 0
    check_error(lib.monetdbe_set_autocommit(self._connection, flag))