def check_blob_id(blob_id: str, column_index: int):
    """Validate that a fetched column value is a usable BLOB id.

    A valid BLOB id is a non-None string ending in ".blob"; anything else
    raises aceql.Error. Returns None on success.
    """
    if blob_id is None:
        raise aceql.Error("No value found for column_index " + str(column_index), 0, None, None, 200)
    elif not blob_id.endswith(".blob"):
        raise aceql.Error("Fetched value does not correspond to a BLOB Id: " + str(blob_id), 0, None, None, 200)
def executemany(self, sql: str, seq_params: list) -> List[int]:
    """Execute the given SQL operation once per parameter set in seq_params.

    All parameter sets are serialized to a local batch file and transferred
    with one unique HTTP call; the server executes them with a JDBC
    PreparedStatement in batch mode for fast execution.

    :param sql: a DELETE, INSERT or UPDATE statement (other calls are rejected).
    :param seq_params: list of parameter tuples/sequences, one per execution.
    :return: the list of update counts, one per parameter set; an empty list
        when seq_params is empty.
    :raises Exception: if the remote AceQL Server does not support batching.
    :raises TypeError: if sql is None.
    :raises aceql.Error: on closed cursor, non-update SQL, or BLOB parameters.
    """
    if not ConnectionUtil.is_batch_supported(self.__connection):
        raise Exception("AceQL Server version must be >= " + ConnectionUtil.BATCH_MIN_SERVER_VERSION
                        + " in order to call executemany.")

    batch_file_parameters = FileUtil.build_batch_file()
    try:
        self.__raise_error_if_closed()
        if sql is None:
            raise TypeError("sql is null!")
        sql = sql.strip()
        if not CursorUtil.is_update_call(sql):
            raise aceql.Error("Only DELETE, INSERT or UPDATE calls are supported this AceQL Client version.",
                              0, None, None, 200)
        if not seq_params:
            # BUG FIX: was a bare `return` (None); honor the declared
            # List[int] return type for the no-work case.
            return []

        # The addBatch() part: one serialized PrepStatementParametersHolder
        # per line in the batch file. The schema is loop-invariant, so build
        # it once (it was previously rebuilt on every iteration), and open
        # the file once instead of re-opening in append mode per iteration.
        holder_schema = marshmallow_dataclass.class_schema(PrepStatementParametersHolder)()
        with open(batch_file_parameters, "a") as fd:
            for params in seq_params:
                # A fresh CursorUtil per parameter set: blob_ids accumulates
                # per call to get_http_parameters_dict.
                the_cursor_util: CursorUtil = CursorUtil()
                parms_dict: dict = the_cursor_util.get_http_parameters_dict(params)
                blob_ids: list = the_cursor_util.blob_ids
                if blob_ids:
                    raise aceql.Error(
                        "Cannot call executemany for a table with BLOB parameter in this AceQL Client version.",
                        0, None, None, 200)
                holder = PrepStatementParametersHolder(parms_dict)
                fd.write(holder_schema.dumps(holder) + "\n")

        # The executeBatch() part: single HTTP round trip for the whole batch.
        rows: List[int] = self.__aceql_http_api.execute_batch(sql, batch_file_parameters)
        return rows
    finally:
        # Always remove the temporary batch file, even on error.
        CursorUtil.remove_file_safe(batch_file_parameters)
def get_blob_stream(self, column_index: int):
    """Return a BLOB stream on a column of the current row.

    The column index starts at 0. The cursor must be open and positioned
    on a row (i.e. a fetchone() call must have been done).
    """
    self.__raise_error_if_closed()
    if column_index is None:
        raise TypeError("column_index is null!")

    col_values: dict = self.__row_parser.get_values_per_col_index()
    if col_values is None:
        raise aceql.Error("Not positioned on a row. (Seems no fetchone() call done.)", 0, None, None, 200)

    # Validate the fetched value before asking the server for the stream.
    blob_id = col_values[column_index]
    Cursor.check_blob_id(blob_id, column_index)
    return self.__aceql_http_api.get_blob_stream(blob_id)
def get_blob_length(self, column_index: int) -> int:
    """Return the remote BLOB length for a column of the current row.

    Useful when a progress indicator is needed during BLOB download.
    The cursor must be open and positioned on a row.
    """
    self.__raise_error_if_closed()
    if column_index is None:
        raise TypeError("column_index is null!")

    values_per_column_index: dict = self.__row_parser.get_values_per_col_index()
    AceQLDebug.debug("values_per_column_index: " + str(values_per_column_index))
    if values_per_column_index is None:
        raise aceql.Error("Not positioned on a row. (No fetchone call done.)", 0, None, None, 200)

    blob_id = values_per_column_index[column_index]
    if AceQLDebugParms.DEBUG_ON:
        print("blob_id: " + str(blob_id))

    # Validate the id, then query the server for the length.
    Cursor.check_blob_id(blob_id, column_index)
    return self.__aceql_http_api.get_blob_length(blob_id)
def __raise_error_if_closed(self):
    """Raise aceql.Error if this cursor has already been closed."""
    if not self.__is_closed:
        return
    raise aceql.Error("Invalid call: Cursor is closed.", 0, None, None, 200)