def __execute_query(self, sql: str, params: tuple = ()):
    """Executes a SELECT on the remote database"""
    self.__raise_error_if_closed()

    self.row_count = 0
    self.__description: list = []

    the_cursor_util = CursorUtil()
    parms_dict: dict = the_cursor_util.get_http_parameters_dict(params)

    is_prepared_statement = False
    if len(parms_dict) > 0:
        is_prepared_statement = True

    self.__result_set_info = self.__aceql_http_api.execute_query(sql, is_prepared_statement, parms_dict)

    # Appends the file to the list of files to delete
    self.__filelist.append(self.__result_set_info.get_filename())

    self.__rowcount = self.__result_set_info.get_row_count()

    AceQLDebug.debug("self.rowcount: " + str(self.__rowcount))
    AceQLDebug.debug("filename     : " + self.__result_set_info.get_filename())

    # First build the description for Cursor.description
    self.__build_description()

    self.__row_parser = RowParser(self.__result_set_info.get_filename(), self.__result_set_info.get_row_count())
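# Illustrative sketch (not part of the library source): the private
# __execute_query() above is normally reached through the public DB-API
# surface, i.e. Cursor.execute() with a parametrized SELECT followed by
# fetchone()/fetchall(). The URL, credentials, database and table names
# below are placeholders.
import aceql

connection = aceql.connect(url="https://www.acme.com:9443/aceql",
                           username="user1", password="password1",
                           database="sampledb")
cursor = connection.cursor()
cursor.execute("select customer_id, fname, lname from customer where customer_id = ?", (1,))
row = cursor.fetchone()
print(row)
cursor.close()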
def execute_server_query(self, server_query_executor_class_name: str, parameters: List):
    """Executes a remote server query that returns a SELECT JDBC ResultSet on the remote database.

    :param server_query_executor_class_name: the remote ServerQueryExecutor interface
        implementation name with package info
    :param parameters: the parameters to pass to the remote
        ServerQueryExecutor.executeQuery() implementation.
    """
    if not ConnectionUtil.is_get_database_info_supported(self.__connection):
        raise Exception("AceQL Server version must be >= "
                        + ConnectionUtil.EXECUTE_SERVER_QUERY_MIN_SERVER_VERSION
                        + " in order to call execute_server_query()")

    self.__raise_error_if_closed()

    self.row_count = 0
    self.__description: list = []

    self.__result_set_info = self.__aceql_http_api.execute_server_query(server_query_executor_class_name, parameters)

    # Appends the file to the list of files to delete
    self.__filelist.append(self.__result_set_info.get_filename())

    self.__rowcount = self.__result_set_info.get_row_count()

    AceQLDebug.debug("self.rowcount: " + str(self.__rowcount))
    AceQLDebug.debug("filename     : " + self.__result_set_info.get_filename())

    # First build the description for Cursor.description
    self.__build_description()

    self.__row_parser = RowParser(self.__result_set_info.get_filename(), self.__result_set_info.get_row_count())
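# Illustrative sketch (not part of the library source): calling a server-side
# ServerQueryExecutor implementation through the Cursor. Assumes an open
# aceql Connection named `connection` (see the first sketch). The class name
# "com.mycompany.MyServerQueryExecutor" and its parameter list are
# hypothetical; the class must exist on the AceQL Server side.
cursor = connection.cursor()
cursor.execute_server_query("com.mycompany.MyServerQueryExecutor", [5])
rows = cursor.fetchall()
for row in rows:
    print(row)
cursor.close()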
def __execute_update(self, sql: str, params: tuple = ()) -> int:
    """Executes an update operation on the remote database"""
    blob_streams: list = []
    try:
        the_cursor_util: CursorUtil = CursorUtil()
        parms_dict: dict = the_cursor_util.get_http_parameters_dict(params)

        blob_ids = the_cursor_util.blob_ids
        blob_streams = the_cursor_util.blob_streams
        blob_lengths = the_cursor_util.blob_lengths

        # Upload each BLOB stream before executing the update itself
        cpt = 0
        for blob_id in blob_ids:
            self.__aceql_http_api.blob_upload(blob_id, blob_streams[cpt], blob_lengths[cpt])
            cpt += 1

        AceQLDebug.debug("parms_dict: " + str(parms_dict))

        is_prepared_statement = False
        if len(parms_dict) > 0:
            is_prepared_statement = True

        rows: int = self.__aceql_http_api.execute_update(sql, is_prepared_statement, parms_dict)
        self.__rowcount = rows
        return rows
    finally:
        for blob_stream in blob_streams:
            blob_stream.close()
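# Illustrative sketch (not part of the library source): __execute_update()
# above uploads BLOBs before running the update. From the public API this
# happens when an open binary file object is passed as a parameter. Assumes
# an open aceql Connection named `connection`; the file, table and column
# names are placeholders.
with open("item_1.jpg", "rb") as fd:
    cursor = connection.cursor()
    sql = "insert into orderlog (customer_id, item_id, jpeg_image) values (?, ?, ?)"
    cursor.execute(sql, (1, 11, fd))  # fd is uploaded as a BLOB, then the INSERT is executed
    cursor.close()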
def __build_description(self):
    """ Builds the .description property"""
    self.__raise_error_if_closed()
    self.__description: list = []

    if self.__rowcount < 1:
        return

    row_parser: RowParser = None
    try:
        row_parser = RowParser(self.__result_set_info.get_filename(), self.__rowcount)
        row_parser.build_next_row()  # read first row to get the column names, only way to do it...

        aceql_types: dict = row_parser.get_types_per_col_index()
        aceql_names: dict = row_parser.column_names_per_index()

        AceQLDebug.debug("aceql_types : " + str(aceql_types))
        AceQLDebug.debug("aceql_names: " + str(aceql_names))

        index = 0
        while index < len(aceql_types):
            name_and_type = list()
            # name_and_type.append(aceql_names[index])
            # name_and_type.append(aceql_types[index])

            aceql_name = aceql_names.get(index)
            if aceql_name is not None:
                name_and_type.append(aceql_name)
                name_and_type.append(aceql_types.get(index))

                # Append 5 non set values
                name_and_type.append(None)
                name_and_type.append(None)
                name_and_type.append(None)
                name_and_type.append(None)
                name_and_type.append(None)

                the_tup = tuple(name_and_type)
                self.__description.append(the_tup)

            index += 1
    finally:
        if row_parser is not None:
            row_parser.close()
def get_blob_length(self, blob_id: str) -> int:
    """ Gets the blob length. """
    try:
        if blob_id is None:
            raise TypeError("blob_id is null!")

        action = "get_blob_length"
        dict_params: dict = {"blob_id": blob_id}

        url_withaction = self._url + action

        AceQLDebug.debug("urlWithaction: " + url_withaction)
        AceQLDebug.debug("dictParams   : " + str(dict_params))

        # r = requests.post('http://httpbin.org/post', data = {'key':'value'})
        if self.__timeout is None:
            response = requests.post(url_withaction, headers=self.__headers, data=dict_params,
                                     proxies=self.__proxies, auth=self.__auth)
        else:
            response = requests.post(url_withaction, headers=self.__headers, data=dict_params,
                                     proxies=self.__proxies, auth=self.__auth,
                                     timeout=self.__timeout)

        self.__http_status_code = response.status_code
        result = response.text

        AceQLDebug.debug("result: " + result)

        result_analyzer = ResultAnalyzer(result, self.__http_status_code)
        if not result_analyzer.is_status_ok():
            raise Error(result_analyzer.get_error_message(),
                        result_analyzer.get_error_type(), None, None,
                        self.__http_status_code)

        length_str = result_analyzer.get_value("length")
        AceQLDebug.debug("result: " + length_str + ":")
        return int(length_str)

    except Exception as e:
        if isinstance(e, Error):
            raise
        else:
            raise Error(str(e), 0, e, None, self.__http_status_code)
def is_status_ok(self) -> bool:
    """Determines whether the SQL statement correctly executed on the server side.

    Returns True if the status is OK, False otherwise.
    """
    with open(self.__filename, mode="r", encoding="utf-8") as fd:
        status_ok = False
        while True:
            s = fd.readline()
            if s == '':
                break
            s = s.strip()
            AceQLDebug.debug(s)

            if s.startswith("\"status\":"):
                AceQLDebug.debug("status: " + s + "!")
                if s.endswith("\"OK\","):
                    status_ok = True
                break

    if not status_ok:
        self.parse_error_keywords()
    return status_ok
def treat_result(self, filename: str):
    """Decompresses the downloaded result file if necessary, checks its status,
    and returns the corresponding ResultSetInfo."""
    file_out = None
    if self.__aceQLHttpApi.is_gzip_result():
        file_out = filename[0:len(filename) - 4] + ".ungzipped.txt"
        FileUtil.decompress(filename, file_out)
        if AceQLDebugParms.DELETE_FILES:
            CursorUtil.remove_file_safe(filename)
    else:
        file_out = filename

    AceQLDebug.debug("Before StreamResultAnalyzer")

    result_analyzer = StreamResultAnalyzer(file_out, self.__aceQLHttpApi.get_http_status_code())
    if not result_analyzer.is_status_ok():
        if AceQLDebugParms.DELETE_FILES:
            CursorUtil.remove_file_safe(filename)
        raise Error(result_analyzer.get_error_message(),
                    result_analyzer.get_error_type(), None, None,
                    self.__aceQLHttpApi.get_http_status_code())

    row_counter = RowCounter(file_out)
    row_count = row_counter.count()

    result_set_info = ResultSetInfo(file_out, row_count)
    AceQLDebug.debug("Before resultSetInfo")

    return result_set_info
def get_blob_length(self, column_index: int) -> int:
    """ Gets the remote BLOB length of a column in the current row.

    To be used if a progress indicator is needed.
    """
    self.__raise_error_if_closed()
    if column_index is None:
        raise TypeError("column_index is null!")

    values_per_column_index: dict = self.__row_parser.get_values_per_col_index()
    AceQLDebug.debug("values_per_column_index: " + str(values_per_column_index))

    if values_per_column_index is None:
        raise aceql.Error("Not positioned on a row. (No fetchone call done.)", 0, None, None, 200)

    blob_id = values_per_column_index[column_index]
    if AceQLDebugParms.DEBUG_ON:
        print("blob_id: " + str(blob_id))

    Cursor.check_blob_id(blob_id, column_index)

    blob_length = self.__aceql_http_api.get_blob_length(blob_id)
    return blob_length
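# Illustrative sketch (not part of the library source): get_blob_length()
# above is meant to size a progress indicator before downloading a BLOB.
# Assumes an open aceql Connection named `connection`; table and column
# names are placeholders.
cursor = connection.cursor()
cursor.execute("select customer_id, jpeg_image from orderlog where customer_id = ?", (1,))
cursor.fetchone()                        # get_blob_length() requires being positioned on a row
blob_length = cursor.get_blob_length(1)  # column index 1 holds the BLOB
print("BLOB length in bytes: " + str(blob_length))
cursor.close()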
def build_next_row(self):
    """Builds the dictionaries of values per column name & values per column index for the next row"""

    # No parse of course if no rows in file...
    if self.__row_count == 0:
        return False

    # Stop parsing if end of file reached
    if self.__rows_parsed >= self.__row_count:
        self.__column_names_per_index = {}
        self.__values_per_col_index = {}
        return False

    s = ""
    while True:
        line = self.__fd.readline()
        if line == '':
            break
        line = line.strip()

        if self.is_last_row(line):
            self.__last_row += 1
            break

        # Case last row
        if self.__last_row == self.__row_count:
            if line == "]":
                self.__last_row += 1
                break

        # do not include closing array bracket
        if not line.startswith("]"):
            s += line

    if s == '':
        return

    # AceQLDebug.print("s:" + s)
    AceQLDebug.debug("")
    AceQLDebug.debug("s : " + s)
    AceQLDebug.debug("self.__last_row : " + str(self.__last_row))

    s = s.replace("{", "")
    s = s.replace("}", "")
    s = "{" + s + "}"

    resp = json.loads(s, object_pairs_hook=collections.OrderedDict)

    index = 0
    for key, value in resp.items():
        # AceQLDebug.debug("key/value: " + str(key) + " " + str(value))
        self.__column_names_per_index[index] = key

        x = CursorUtil.get_utf8_value(value)
        if str(x) == 'NULL':
            self.__values_per_col_index[index] = None
        else:
            self.__values_per_col_index[index] = value

        index += 1
        # AceQLDebug.debug("key: %s , value: %s" % (key, self.__values_per_col_name[key]))

    self.__rows_parsed += 1
    return True
def execute_update(self, sql: str, is_prepared_statement: bool, statement_parameters: dict):
    """Calls /execute_update API"""
    try:
        action = "execute_update"

        AceQLExecutionUtil.check_values(is_prepared_statement, sql)

        dict_params: dict = {"sql": sql}
        AceQLExecutionUtil.set_is_prepared_statement(dict_params, is_prepared_statement)

        url_withaction = self.__url + action
        AceQLDebug.debug("url_withaction: " + url_withaction)
        AceQLDebug.debug("dict_params 1: " + str(dict_params))

        if statement_parameters is not None:
            if not isinstance(statement_parameters, dict):
                raise TypeError("statement_parameters is not a dictionary!")
            dict_params.update(statement_parameters)

        AceQLDebug.debug("dict_params 2: " + str(dict_params))

        # r = requests.post('http://httpbin.org/post', data = {'key':'value'})
        if self.__aceQLHttpApi.get_timeout() is None:
            AceQLDebug.debug("UPDATE HERE 1")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth())
        else:
            AceQLDebug.debug("UPDATE HERE 2")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth(),
                timeout=self.__aceQLHttpApi.get_timeout())

        self.__aceQLHttpApi.set_http_status_code(response.status_code)
        result = response.text

        AceQLDebug.debug("result: " + result)

        result_analyzer = ResultAnalyzer(result, self.__aceQLHttpApi.get_http_status_code())
        if not result_analyzer.is_status_ok():
            raise Error(result_analyzer.get_error_message(),
                        result_analyzer.get_error_type(), None, None,
                        self.__aceQLHttpApi.get_http_status_code())

        row_count = result_analyzer.get_int_value("row_count")
        return row_count

    except Exception as e:
        if isinstance(e, Error):
            raise
        else:
            raise Error(str(e), 0, e, None, self.__aceQLHttpApi.get_http_status_code())
def execute_batch(self, sql: str, batch_file_parameters: str):
    """Calls /prepared_statement_execute_batch API and returns the array of update counts."""
    try:
        action = "prepared_statement_execute_batch"

        AceQLExecutionUtil.check_values(True, sql)

        url_withaction = self.__url + action
        AceQLDebug.debug("url_withaction: " + url_withaction)

        # Upload the batch parameters file as a BLOB
        blob_id: str = os.path.basename(batch_file_parameters)
        length: int = os.path.getsize(batch_file_parameters)
        with open(batch_file_parameters, "rb") as fd:
            self.__aceQLHttpApi.blob_upload(blob_id, fd, length)

        dict_params: dict = {"sql": sql, "blob_id": blob_id}
        AceQLDebug.debug("dict_params: " + str(dict_params))

        if self.__aceQLHttpApi.get_timeout() is None:
            response = requests.post(url_withaction,
                                     headers=self.__aceQLHttpApi.get_headers(),
                                     data=dict_params,
                                     proxies=self.__aceQLHttpApi.get_proxies(),
                                     auth=self.__aceQLHttpApi.get_auth())
        else:
            response = requests.post(url_withaction,
                                     headers=self.__aceQLHttpApi.get_headers(),
                                     data=dict_params,
                                     proxies=self.__aceQLHttpApi.get_proxies(),
                                     auth=self.__aceQLHttpApi.get_auth(),
                                     timeout=self.__aceQLHttpApi.get_timeout())

        self.__aceQLHttpApi.set_http_status_code(response.status_code)
        result = response.text

        AceQLDebug.debug("result: " + result)

        result_analyzer = ResultAnalyzer(result, self.__aceQLHttpApi.get_http_status_code())
        if not result_analyzer.is_status_ok():
            raise Error(result_analyzer.get_error_message(),
                        result_analyzer.get_error_type(), None, None,
                        self.__aceQLHttpApi.get_http_status_code())

        update_counts_array_dto_schema = marshmallow_dataclass.class_schema(UpdateCountsArrayDto)
        update_counts_array_dto_back: UpdateCountsArrayDto = update_counts_array_dto_schema().loads(result)
        update_counts_array: List[int] = update_counts_array_dto_back.updateCountsArray

        return update_counts_array

    except Exception as e:
        if isinstance(e, Error):
            raise
        else:
            raise Error(str(e), 0, e, None, self.__aceQLHttpApi.get_http_status_code())
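# Illustrative sketch (not part of the library source): execute_batch() above
# is the low-level call behind batched updates. It is assumed here that the
# client exposes the DB-API executemany(), which builds the batch parameters
# file and delegates to execute_batch(); assumes an open aceql Connection
# named `connection`, and the table and values are placeholders.
cursor = connection.cursor()
params_list = [(1, "Alice"), (2, "Bob"), (3, "Carol")]
cursor.executemany("insert into customer (customer_id, fname) values (?, ?)", params_list)
cursor.close()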
def execute_server_query(self, server_query_executor_class_name: str, parameters: list):
    """Calls /execute_server_query API"""
    try:
        action = "execute_server_query"

        dict_params = {"server_query_executor_class_name": server_query_executor_class_name}
        AceQLDebug.debug("dict_params 1: " + str(dict_params))

        server_query_executor_dto_schema = marshmallow_dataclass.class_schema(ServerQueryExecutorDto)
        server_query_executor_dto: ServerQueryExecutorDto = ServerQueryExecutorDtoBuilder.build(
            server_query_executor_class_name, parameters)
        json_string: str = server_query_executor_dto_schema().dumps(server_query_executor_dto)
        dict_params["server_query_executor_dto"] = json_string

        url_withaction = self.__url + action
        AceQLDebug.debug("url_withaction: " + url_withaction)
        AceQLDebug.debug("parameters    : " + str(parameters))

        self.update_dict_params(dict_params)
        AceQLDebug.debug("dict_params 2: " + str(dict_params))

        # r = requests.post('http://httpbin.org/post', data = {'key':'value'})
        if self.__aceQLHttpApi.get_timeout() is None:
            AceQLDebug.debug("QUERY HERE 1")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth())
        else:
            AceQLDebug.debug("QUERY HERE 2")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth(),
                timeout=self.__aceQLHttpApi.get_timeout())

        AceQLDebug.debug("DONE!")
        self.__aceQLHttpApi.set_http_status_code(response.status_code)

        filename = FileUtil.build_result_set_file()
        AceQLDebug.debug("filename1: " + filename)

        # We dump the JSON stream into user.home/.kawansoft/tmp
        with open(filename, 'wb') as fd:
            for chunk in response.iter_content(chunk_size=2048):
                fd.write(chunk)

        AceQLDebug.debug("after open filename")

        result_set_info = self.treat_result(filename)
        return result_set_info

    except Exception as e:
        if isinstance(e, Error):
            raise
        else:
            raise Error(str(e), 0, e, None, self.__aceQLHttpApi.get_http_status_code())
def execute_query(self, sql: str, is_prepared_statement: bool, statement_parameters: dict):
    """Calls /execute_query API"""
    try:
        action = "execute_query"

        AceQLExecutionUtil.check_values(is_prepared_statement, sql)

        dict_params = {"sql": sql}
        AceQLExecutionUtil.set_is_prepared_statement(dict_params, is_prepared_statement)

        url_withaction = self.__url + action
        AceQLDebug.debug("url_withaction: " + url_withaction)
        AceQLDebug.debug("dict_params 1: " + str(dict_params))

        if statement_parameters is not None:
            if not isinstance(statement_parameters, dict):
                raise TypeError("statement_parameters is not a dictionary!")
            dict_params.update(statement_parameters)

        self.update_dict_params(dict_params)
        AceQLDebug.debug("dict_params 2: " + str(dict_params))

        # r = requests.post('http://httpbin.org/post', data = {'key':'value'})
        if self.__aceQLHttpApi.get_timeout() is None:
            AceQLDebug.debug("QUERY HERE 1")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth())
        else:
            AceQLDebug.debug("QUERY HERE 2")
            response = requests.post(
                url_withaction,
                headers=self.__aceQLHttpApi.get_headers(),
                data=dict_params,
                proxies=self.__aceQLHttpApi.get_proxies(),
                auth=self.__aceQLHttpApi.get_auth(),
                timeout=self.__aceQLHttpApi.get_timeout())

        AceQLDebug.debug("DONE!")
        self.__aceQLHttpApi.set_http_status_code(response.status_code)

        filename = FileUtil.build_result_set_file()
        AceQLDebug.debug("filename1: " + filename)

        # We dump the JSON stream into user.home/.kawansoft/tmp
        with open(filename, 'wb') as fd:
            for chunk in response.iter_content(chunk_size=2048):
                fd.write(chunk)

        AceQLDebug.debug("after open filename")

        result_set_info = self.treat_result(filename)
        return result_set_info

    except Exception as e:
        if isinstance(e, Error):
            raise
        else:
            raise Error(str(e), 0, e, None, self.__aceQLHttpApi.get_http_status_code())