def process_request(parentPID, name, queue, db_type, db_server, db_database,
                    db_user, db_password, db_host, db_port, db_encoding, mod_count):
    """Connection-worker loop: pull SQL requests from ``queue``, execute them,
    and put the result on each request's own reply queue.

    The database connection is recycled (rolled back and closed) whenever the
    task's ``mod_count`` changes, after more than 1000 requests, or after the
    connection has been held for an hour or more.

    Args:
        parentPID: PID of the parent process; the loop exits when the parent
            is gone (worker was orphaned). Falsy disables the check.
        name: worker name (unused here; kept for the caller's interface).
        queue: multiprocessing/threading queue of request dicts.
        db_type..db_encoding: connection parameters passed to execute_sql.
        mod_count: last known task modification counter.
    """
    con = None
    counter = 0
    last_date = datetime.datetime.now()
    db_module = db_modules.get_db_module(db_type)
    while True:
        # Stop serving if our parent process has died (getppid no longer matches).
        if parentPID and hasattr(os, 'getppid') and os.getppid() != parentPID:
            break
        request = queue.get()
        if request:
            result_queue = request['queue']
            command = request['command']
            params = request['params']
            select = request['select']
            cur_mod_count = request['mod_count']
            date = datetime.datetime.now()
            # Whole hours since the connection was (re)created.
            hours = (date - last_date).total_seconds() // 3600
            if cur_mod_count != mod_count or counter > 1000 or hours >= 1:
                if con:
                    try:
                        con.rollback()
                        con.close()
                    # BUG FIX: was a bare `except:` which also swallows
                    # SystemExit/KeyboardInterrupt; best-effort cleanup only
                    # needs to ignore ordinary errors on a broken connection.
                    except Exception:
                        pass
                con = None
                mod_count = cur_mod_count
                counter = 0
                last_date = date
            con, result = execute_sql(db_module, db_server, db_database,
                                      db_user, db_password, db_host, db_port,
                                      db_encoding, con, command, params, select)
            counter += 1
            result_queue.put(result)
def create_index_sql(self, db_type, table_name, fields=None, new_fields=None, foreign_key_dict=None):
    """Generate the CREATE INDEX (or foreign-key index) SQL for this index record.

    When ``foreign_key_dict`` is given its key/ref/primary_key values are used
    directly; otherwise they are looked up from the admin datasets
    (sys_fields / sys_items). For a regular index the field list is taken from
    ``f_fields_list`` (or mapped through ``new_fields`` during a table change).
    """
    def new_field_name_by_id(id_value):
        # Map a field id to its (possibly renamed) name in new_fields.
        # NOTE: returns None if the id is absent — assumed not to happen here.
        for f in new_fields:
            if f['id'] == id_value:
                return f['field_name']
    db_module = db_modules.get_db_module(db_type)
    index_name = self.f_index_name.value
    if self.f_foreign_index.value:
        if foreign_key_dict:
            key = foreign_key_dict['key']
            ref = foreign_key_dict['ref']
            primary_key = foreign_key_dict['primary_key']
        else:
            # Stateful lookups: the same `fields` dataset copy is filtered and
            # re-opened twice (first for the FK field, then for the referenced
            # table's primary-key field) — the order of these calls matters.
            fields = self.task.sys_fields.copy()
            fields.set_where(id=self.f_foreign_field.value)
            fields.open()
            key = fields.f_db_field_name.value
            ref_id = fields.f_object.value
            items = self.task.sys_items.copy()
            items.set_where(id=ref_id)
            items.open()
            ref = items.f_table_name.value
            primary_key = items.f_primary_key.value
            fields.set_where(id=primary_key)
            fields.open()
            primary_key = fields.f_db_field_name.value
        sql = db_module.create_foreign_index_sql(table_name, index_name, key, ref, primary_key)
    else:
        index_fields = self.f_fields_list.value
        desc = ''
        if self.descending.value:
            desc = 'DESC'
        unique = ''
        if self.f_unique_index.value:
            unique = 'UNIQUE'
        # load_index_fields yields (field_id, descending_flag) pairs.
        fields = common.load_index_fields(index_fields)
        if db_type == db_modules.FIREBIRD:
            # Firebird applies DESC to the whole index, not per column.
            if new_fields:
                field_defs = [new_field_name_by_id(field[0]) for field in fields]
            else:
                field_defs = [self.task.sys_fields.field_by_id(field[0], 'f_db_field_name') for field in fields]
            field_str = '"' + '", "'.join(field_defs) + '"'
        else:
            # Other backends: per-column DESC as recorded per index field.
            field_defs = []
            for field in fields:
                if new_fields:
                    field_name = new_field_name_by_id(field[0])
                else:
                    field_name = self.task.sys_fields.field_by_id(field[0], 'f_db_field_name')
                d = ''
                if field[1]:
                    d = 'DESC'
                field_defs.append('"%s" %s' % (field_name, d))
            field_str = ', '.join(field_defs)
        sql = db_module.create_index_sql(index_name, table_name, unique, field_str, desc)
    #~ print(sql)
    return sql
def delete_table_sql(self, db_type):
    """Build and echo the DROP statements for this item's table.

    The table's sequence/generator is only dropped when the item has a
    primary key (otherwise no generator was created for it).
    """
    db_module = db_modules.get_db_module(db_type)
    gen_name = self.f_gen_name.value if self.f_primary_key.value else None
    result = db_module.delete_table_sql(self.f_table_name.value, gen_name)
    for statement in result:
        print(statement)
    return result
def delete_index_sql(self, db_type, table_name=None):
    """Return the SQL that drops this index (foreign-key or regular).

    When ``table_name`` is omitted it is resolved from the owning item.
    """
    db_module = db_modules.get_db_module(db_type)
    if not table_name:
        # Fall back to the owner item's table name.
        table_name = self.task.sys_items.field_by_id(
            self.owner_rec_id.value, 'f_table_name')
    index_name = self.f_index_name.value
    # Pick the drop builder matching the index kind.
    builder = (db_module.delete_foreign_index
               if self.f_foreign_index.value else db_module.delete_index)
    return builder(table_name, index_name)
def delete_index_sql(self, db_type, table_name=None):
    """Generate DROP-index SQL; the table name defaults to the owner item's."""
    db_module = db_modules.get_db_module(db_type)
    target_table = table_name
    if not target_table:
        target_table = self.task.sys_items.field_by_id(
            self.owner_rec_id.value, 'f_table_name')
    name = self.f_index_name.value
    if self.f_foreign_index.value:
        return db_module.delete_foreign_index(target_table, name)
    return db_module.delete_index(target_table, name)
def change_table_sql(self, db_type, old_fields, new_fields):
    """Build the ALTER TABLE statements migrating ``old_fields`` to ``new_fields``.

    SQLite cannot rename/alter columns in place, so a rename, default-value
    change or column deletion there triggers a full table recreation instead.
    Returns the list of SQL statements (also echoed to stdout).
    """
    def recreate(comp):
        # True when any field was renamed, had its default changed, or deleted
        # — the changes SQLite cannot express with ALTER TABLE.
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and new_field:
                if old_field['field_name'] != new_field['field_name']:
                    return True
                elif old_field['default_value'] != new_field[
                        'default_value']:
                    return True
            elif old_field and not new_field:
                return True

    db_module = db_modules.get_db_module(db_type)
    table_name = self.f_table_name.value
    result = []
    # comp maps field id (or field name for brand-new, id-less fields) to a
    # pair [old_field, new_field]; a missing side marks deletion/addition.
    comp = {}
    for field in old_fields:
        comp[field['id']] = [field, None]
    for field in new_fields:
        if comp.get(field['id']):
            comp[field['id']][1] = field
        else:
            if field['id']:
                comp[field['id']] = [None, field]
            else:
                comp[field['field_name']] = [None, field]
    if db_type == db_modules.SQLITE and recreate(comp):
        result += self.recreate_table_sql(db_type, old_fields, new_fields)
    else:
        # 1) drop removed columns
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and not new_field and db_type != db_modules.SQLITE:
                result.append(
                    db_module.del_field_sql(table_name, old_field))
        # 2) alter columns whose definition changed
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and new_field and db_type != db_modules.SQLITE:
                if (old_field['field_name'] != new_field['field_name']) or \
                    (db_module.FIELD_TYPES[old_field['data_type']] !=
                     db_module.FIELD_TYPES[new_field['data_type']]) or \
                    (old_field['default_value'] != new_field['default_value']) or \
                    (old_field['size'] != new_field['size']):
                    sql = db_module.change_field_sql(
                        table_name, old_field, new_field)
                    if type(sql) in (list, tuple):
                        result += sql
                    else:
                        # BUG FIX: was result.append() with no argument,
                        # which raised TypeError and lost the statement.
                        result.append(sql)
        # 3) add brand-new columns
        for key, (old_field, new_field) in iteritems(comp):
            if not old_field and new_field:
                result.append(
                    db_module.add_field_sql(table_name, new_field))
    for s in result:
        print(s)
    return result
def copy_database(self, dbtype, database=None, user=None, password=None, host=None, port=None, encoding=None, limit = 4048):
    """Copy every non-report item's table from another database into this task's DB.

    Rows are pulled from the source connection in pages of ``limit`` records
    and re-inserted through the item dataset (append/post/apply). After each
    table the item's sequence is restarted past the highest primary key seen.

    NOTE(review): ``limit = 4048`` looks like a typo for 4096 (a sibling
    version of this method uses 4096) — confirm before changing.
    """
    connection = None
    db_module = db_modules.get_db_module(dbtype)
    for group in self.items:
        for it in group.items:
            if it.item_type != 'report':
                # Work on a bare copy: no handlers/filters/details side effects.
                item = it.copy(handlers=False, filters=False, details=False)
                if item.table_name and not item.virtual_table:
                    # Clear the destination table before copying.
                    self.execute('DELETE FROM "%s"' % item.table_name)
                    item.open(expanded=False, open_empty=True)
                    params = {'__fields': [], '__filters': [],
                              '__expanded': False, '__offset': 0, '__limit': 0}
                    # Count source rows first so progress can be reported.
                    sql = item.get_record_count_query(params, db_module)
                    connection, (result, error) = \
                        execute_sql(db_module, database, user, password, host, port,
                                    encoding, connection, sql, params=None, select=True)
                    record_count = result[0][0]
                    loaded = 0
                    max_id = 0
                    if record_count:
                        while True:
                            # Page through the source table.
                            params['__offset'] = loaded
                            params['__limit'] = limit
                            sql = item.get_select_statement(params, db_module)
                            connection, (result, error) = \
                                execute_sql(db_module, database, user, password, host, port,
                                            encoding, connection, sql, params=None, select=True)
                            if not error:
                                for i, r in enumerate(result):
                                    item.append()
                                    # j indexes only non-master fields — assumed to
                                    # match the select statement's column order.
                                    j = 0
                                    for field in item.fields:
                                        if not field.master_field:
                                            field.value = r[j]
                                            j += 1
                                    # Track the largest primary key for the sequence restart.
                                    if item._primary_key_field.value > max_id:
                                        max_id = item._primary_key_field.value
                                    item.post()
                                item.apply()
                            else:
                                raise Exception(error)
                            records = len(result)
                            loaded += records
                            # NOTE(review): 'coping' is a typo for 'copying' in this
                            # progress message (runtime string, left unchanged here).
                            print('coping table %s: %d%%' %
                                  (item.item_name, int(loaded * 100 / record_count)))
                            if records == 0 or records < limit:
                                break
                    if item.gen_name:
                        # Restart the destination sequence past the copied ids.
                        sql = self.db_module.restart_sequence_sql(item.gen_name, max_id + 1)
                        self.execute(sql)
def create_index_sql(self, db_type, table_name, fields=None, new_fields=None, foreign_key_dict=None):
    """Generate the CREATE INDEX (or foreign-key index) SQL for this index record.

    ``foreign_key_dict``, when provided, supplies key/ref/primary_key directly;
    otherwise they are resolved via the admin datasets (sys_fields/sys_items).
    For regular indexes the column list comes from ``f_fields`` (mapped through
    ``new_fields`` while a table change is in flight).
    """
    def new_field_name_by_id(id_value):
        # Map a field id to its (possibly renamed) name in new_fields.
        # NOTE: returns None if the id is not found — assumed not to happen.
        for f in new_fields:
            if f['id'] == id_value:
                return f['field_name']
    db_module = db_modules.get_db_module(db_type)
    index_name = self.f_index_name.value
    if self.f_foreign_index.value:
        if foreign_key_dict:
            key = foreign_key_dict['key']
            ref = foreign_key_dict['ref']
            primary_key = foreign_key_dict['primary_key']
        else:
            # Stateful lookups: the same `fields` dataset copy is filtered and
            # opened twice (FK field first, then the referenced table's primary
            # key field) — call order is significant.
            fields = self.task.sys_fields.copy()
            fields.set_where(id=self.f_foreign_field.value)
            fields.open()
            key = fields.f_field_name.value
            ref_id = fields.f_object.value
            items = self.task.sys_items.copy()
            items.set_where(id=ref_id)
            items.open()
            ref = items.f_table_name.value
            primary_key = items.f_primary_key.value
            fields.set_where(id=primary_key)
            fields.open()
            primary_key = fields.f_field_name.value
        sql = db_module.create_foreign_index_sql(table_name, index_name, key, ref, primary_key)
    else:
        index_fields = self.f_fields.value
        desc = ''
        if self.descending.value:
            desc = 'DESC'
        unique = ''
        if self.f_unique_index.value:
            unique = 'UNIQUE'
        # load_index_fields yields (field_id, descending_flag) pairs;
        # only the id is used here.
        fields = common.load_index_fields(index_fields)
        if new_fields:
            fields = [new_field_name_by_id(field[0]) for field in fields]
        else:
            fields = [self.task.sys_fields.field_by_id(field[0], 'f_field_name') for field in fields]
        if desc and db_type in (db_modules.SQLITE, db_modules.POSTGRESQL):
            # These backends take DESC per column rather than on the index.
            fields = ['"%s" %s' % (field, desc) for field in fields]
            field_str = ', '.join(fields)
        else:
            field_str = '"' + '", "'.join(fields) + '"'
        sql = db_module.create_index_sql(index_name, table_name, unique, field_str, desc)
    print(sql)
    return sql
def change_table_sql(self, db_type, old_fields, new_fields):
    """Return (and echo) ALTER TABLE statements turning ``old_fields`` into
    ``new_fields``.

    For SQLite, which cannot alter or drop columns in place, a rename,
    default-value change or deletion falls back to recreating the table.
    """
    def recreate(comp):
        # Detect changes SQLite's ALTER TABLE cannot express.
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and new_field:
                if old_field['field_name'] != new_field['field_name']:
                    return True
                elif old_field['default_value'] != new_field['default_value']:
                    return True
            elif old_field and not new_field:
                return True

    db_module = db_modules.get_db_module(db_type)
    table_name = self.f_table_name.value
    result = []
    # Pair up old and new field definitions keyed by id (or by name for
    # freshly added fields that have no id yet).
    comp = {}
    for field in old_fields:
        comp[field['id']] = [field, None]
    for field in new_fields:
        if comp.get(field['id']):
            comp[field['id']][1] = field
        else:
            if field['id']:
                comp[field['id']] = [None, field]
            else:
                comp[field['field_name']] = [None, field]
    if db_type == db_modules.SQLITE and recreate(comp):
        result += self.recreate_table_sql(db_type, old_fields, new_fields)
    else:
        # Deletions first, then alterations, then additions.
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and not new_field and db_type != db_modules.SQLITE:
                result.append(db_module.del_field_sql(table_name, old_field))
        for key, (old_field, new_field) in iteritems(comp):
            if old_field and new_field and db_type != db_modules.SQLITE:
                if (old_field['field_name'] != new_field['field_name']) or \
                    (db_module.FIELD_TYPES[old_field['data_type']] !=
                     db_module.FIELD_TYPES[new_field['data_type']]) or \
                    (old_field['default_value'] != new_field['default_value']) or \
                    (old_field['size'] != new_field['size']):
                    sql = db_module.change_field_sql(table_name, old_field, new_field)
                    if type(sql) in (list, tuple):
                        result += sql
                    else:
                        # BUG FIX: was result.append() without an argument —
                        # a TypeError that also dropped the statement.
                        result.append(sql)
        for key, (old_field, new_field) in iteritems(comp):
            if not old_field and new_field:
                result.append(db_module.add_field_sql(table_name, new_field))
    for s in result:
        print(s)
    return result
def create_table_sql(self, db_type, table_name, fields=None, foreign_fields=None):
    """Build (and echo) the CREATE TABLE statements for this item.

    When ``fields`` is not supplied, the definitions are collected from the
    item's own non-calculated, non-master fields.
    """
    if not fields:
        fields = []
        for field in self.fields:
            if not (field.calculated or field.master_field):
                dic = {}
                dic['id'] = field.ID
                dic['field_name'] = field.field_name
                dic['data_type'] = field.data_type
                dic['size'] = field.field_size
                dic['default_value'] = field.f_default_value.value
                # BUG FIX: `item` was an undefined name here (NameError as
                # soon as this branch ran); the enclosing record is `self`.
                dic['primary_key'] = field.id.value == self.f_primary_key.value
                fields.append(dic)
    result = []
    db_module = db_modules.get_db_module(db_type)
    result = db_module.create_table_sql(table_name, fields, foreign_fields)
    for s in result:
        print(s)
    return result
def create_table_sql(self, db_type, table_name, fields=None, gen_name=None, foreign_fields=None):
    """Build (and echo) the CREATE TABLE statements (plus sequence/generator
    ``gen_name`` when given) for this item.

    When ``fields`` is not supplied, the definitions are collected from the
    item's own non-calculated, non-master fields.
    """
    if not fields:
        fields = []
        for field in self.fields:
            if not (field.calculated or field.master_field):
                dic = {}
                dic['id'] = field.ID
                dic['field_name'] = field.db_field_name
                dic['data_type'] = field.data_type
                dic['size'] = field.field_size
                # Default value deliberately left blank
                # (was: field.f_default_value.value, commented out upstream).
                dic['default_value'] = ''
                # BUG FIX: `item` was an undefined name here (NameError as
                # soon as this branch ran); the enclosing record is `self`.
                dic['primary_key'] = field.id.value == self.f_primary_key.value
                fields.append(dic)
    result = []
    db_module = db_modules.get_db_module(db_type)
    result = db_module.create_table_sql(table_name, fields, gen_name, foreign_fields)
    for s in result:
        print(s)
    return result
def __init__(self, app, name, caption, js_filename, db_type, db_server = '', db_database = '', db_user = '', db_password = '', host='', port='', encoding='', con_pool_size=1, mp_pool=False, persist_con=False):
    """Initialize the server-side task: identity, DB connection settings,
    event-handler slots, and the connection pool(s).

    With ``mp_pool`` a multiprocessing pool of ``con_pool_size`` workers is
    created (plus a single persistent in-process connection when
    ``persist_con``); otherwise a plain in-process pool is used.
    """
    AbstrTask.__init__(self, None, None, None, None)
    self.app = app
    self.consts = Consts()
    self.items = []
    self.lookup_lists = {}
    self.ID = None
    self.item_name = name
    self.item_caption = caption
    self.js_filename = js_filename
    # Database connection settings.
    self.db_type = db_type
    self.db_server = db_server
    self.db_database = db_database
    self.db_user = db_user
    self.db_password = db_password
    self.db_host = host
    self.db_port = port
    self.db_encoding = encoding
    self.db_module = db_modules.get_db_module(self.db_type)
    # Event-handler slots, assigned later by application code.
    self.on_before_request = None
    self.on_after_request = None
    self.on_open = None
    self.on_apply = None
    self.on_count = None
    self.work_dir = os.getcwd()
    # Placeholder; overwritten with the real value a few lines below.
    self.con_pool_size = 0
    self.mod_count = 0
    self.modules = []
    self.conversion_lock = threading.Lock()
    self.con_pool_size = con_pool_size
    self.mp_pool = mp_pool
    self.persist_con = persist_con
    self.con_counter = ConCounter()
    #~ self.persist_con_busy = 0
    # Side effects: spin up the connection pool(s) now.
    if self.mp_pool:
        if self.persist_con:
            self.create_connection_pool(1)
        self.create_mp_connection_pool(self.con_pool_size)
    else:
        self.create_connection_pool(self.con_pool_size)
def create_table_sql(self, db_type, table_name, fields=None, gen_name=None, foreign_fields=None):
    """Return the CREATE TABLE statements (and sequence ``gen_name`` when
    given) for this item without printing them.

    When ``fields`` is not supplied, definitions are collected from the
    item's own non-master fields.
    """
    if not fields:
        fields = []
        for field in self.fields:
            if not field.master_field:
                dic = {}
                dic['id'] = field.ID
                dic['field_name'] = field.db_field_name
                dic['data_type'] = field.data_type
                dic['size'] = field.field_size
                dic['default_value'] = ''
                # BUG FIX: `item` was an undefined name here (NameError as
                # soon as this branch ran); the enclosing record is `self`.
                dic['primary_key'] = field.id.value == self.f_primary_key.value
                fields.append(dic)
    result = []
    db_module = db_modules.get_db_module(db_type)
    result = db_module.create_table_sql(table_name, fields, gen_name, foreign_fields)
    return result
def process_request(parentPID, name, queue, db_type, db_server, db_database,
                    db_user, db_password, db_host, db_port, db_encoding, mod_count):
    """Connection-worker loop (stored-procedure-aware variant): pull SQL
    requests from ``queue``, execute them via execute_sql, and reply on each
    request's own queue.

    The connection is recycled when the task's ``mod_count`` changes, after
    more than 1000 requests, or after an hour of use. The loop exits when the
    parent process disappears.
    """
    con = None
    counter = 0
    last_date = datetime.datetime.now()
    db_module = db_modules.get_db_module(db_type)
    while True:
        # Stop serving if our parent process has died.
        if parentPID and hasattr(os, 'getppid') and os.getppid() != parentPID:
            break
        request = queue.get()
        if request:
            result_queue = request['queue']
            command = request['command']
            params = request['params']
            call_proc = request['call_proc']
            select = request['select']
            cur_mod_count = request['mod_count']
            date = datetime.datetime.now()
            # Whole hours since the connection was (re)created.
            hours = (date - last_date).total_seconds() // 3600
            if cur_mod_count != mod_count or counter > 1000 or hours >= 1:
                if con:
                    try:
                        con.rollback()
                        con.close()
                    # BUG FIX: was a bare `except:` which also swallows
                    # SystemExit/KeyboardInterrupt; best-effort cleanup only
                    # needs to ignore ordinary errors on a broken connection.
                    except Exception:
                        pass
                con = None
                mod_count = cur_mod_count
                counter = 0
                last_date = date
            con, result = execute_sql(db_module, db_server, db_database,
                                      db_user, db_password, db_host, db_port,
                                      db_encoding, con, command, params,
                                      call_proc, select)
            counter += 1
            result_queue.put(result)
def delete_table_sql(self, db_type):
    """Build and echo the DROP statements for this item's table and generator."""
    db_module = db_modules.get_db_module(db_type)
    statements = db_module.delete_table_sql(
        self.f_table_name.value, self.f_gen_name.value)
    for statement in statements:
        print(statement)
    return statements
def copy_database(self, dbtype, database=None, user=None, password=None, host=None, port=None, encoding=None, server=None, limit = 4096):
    """Copy every non-report item's table from a source database into this
    task's database, paging ``limit`` rows at a time.

    Indexes are dropped up front and restored at the end (even on failure),
    foreign-key checks are disabled during the copy when the backend supports
    it, and each table's sequence is restarted past the highest primary key
    seen. Tables whose row counts already match the source are skipped.
    """
    def convert_sql(item, sql, db_module):
        # Rewrite identifier case when source and destination backends use
        # different default identifier casing. SQL keywords are left alone.
        new_case = item.task.db_module.identifier_case
        old_case = db_module.identifier_case
        if old_case('a') != new_case('a'):
            if new_case(item.table_name) == item.table_name:
                sql = sql.replace(item.table_name, old_case(item.table_name))
            for field in item.fields:
                if new_case(field.db_field_name) == field.db_field_name and \
                    not field.db_field_name.upper() in common.SQL_KEYWORDS:
                    field_name = '"%s"' % field.db_field_name
                    sql = sql.replace(field_name, old_case(field_name))
        return sql

    print('copying started')
    connection = None
    db_module = db_modules.get_db_module(dbtype)
    print('copying droping indexes')
    self.drop_indexes()
    # Disable FK enforcement during bulk load when the backend supports it.
    if hasattr(self.db_module, 'set_foreign_keys'):
        self.execute(self.db_module.set_foreign_keys(False))
    try:
        for group in self.items:
            for it in group.items:
                if it.item_type != 'report':
                    # Bare copy: no handlers/filters/details side effects.
                    item = it.copy(handlers=False, filters=False, details=False)
                    if item.table_name and not item.virtual_table:
                        print('copying table %s' % item.item_name)
                        params = {'__expanded': False, '__offset': 0,
                                  '__limit': 0, '__filters': []}
                        # Destination count vs. source count decides whether
                        # this table needs (re)copying at all.
                        rec_count, mess = item.get_record_count(params)
                        sql = item.get_record_count_query(params, db_module)
                        sql = convert_sql(item, sql, db_module)
                        connection, (result, error) = \
                            execute_sql(db_module, server, database, user,
                                        password, host, port, encoding,
                                        connection, sql, params=None, select=True)
                        record_count = result[0][0]
                        loaded = 0
                        max_id = 0
                        item.open(expanded=False, open_empty=True)
                        if record_count and rec_count != record_count:
                            # Clear the destination table, then page the rows in.
                            self.execute('DELETE FROM "%s"' % item.table_name)
                            while True:
                                params = {'__expanded': False, '__offset': loaded,
                                          '__limit': limit, '__fields': [],
                                          '__filters': []}
                                sql = item.get_select_statement(params, db_module)
                                sql = convert_sql(item, sql, db_module)
                                connection, (result, error) = \
                                    execute_sql(db_module, server, database, user,
                                                password, host, port, encoding,
                                                connection, sql, params=None, select=True)
                                if not error:
                                    for i, r in enumerate(result):
                                        item.append()
                                        # j indexes only non-master fields — assumed
                                        # to match the select's column order.
                                        j = 0
                                        for field in item.fields:
                                            if not field.master_field:
                                                field.set_data(r[j])
                                                j += 1
                                        # Track the largest primary key for the
                                        # sequence restart below.
                                        if item._primary_key and item._primary_key_field.value > max_id:
                                            max_id = item._primary_key_field.value
                                        item.post()
                                    item.apply()
                                else:
                                    raise Exception(error)
                                records = len(result)
                                loaded += records
                                print('copying table %s: %d%%' %
                                      (item.item_name, int(loaded * 100 / record_count)))
                                if records == 0 or records < limit:
                                    break
                            if item.gen_name:
                                sql = self.db_module.restart_sequence_sql(item.gen_name, max_id + 1)
                                self.execute(sql)
    finally:
        # Always restore indexes and FK enforcement, even after a failure.
        print('copying restoring indexes')
        self.restore_indexes()
        if hasattr(self.db_module, 'set_foreign_keys'):
            self.execute(self.db_module.set_foreign_keys(True))
    print('copying finished')