def run_tbl_operations(conn_params, form_type, get_data=None, post_data=None):
    '''
    Generate and run table-level maintenance statements (ALTER TABLE,
    VACUUM, ANALYZE, REINDEX) and return the execution-result dict with
    an HTML-formatted status message in ret['msg'].

    conn_params -- connection settings dict; conn_params['dialect']
                   selects the SQL flavor
    form_type   -- one of 'tbl_edit_form', 'tbl_vacuum_form',
                   'analyze_table', 'reindex_table'
    get_data    -- request.GET-style parameters (defaults to empty dict)
    post_data   -- request.POST-style parameters (defaults to empty dict)

    Returns the dict produced by the sa.* executor, with ret['msg']
    rewritten as a '<p class="query-msg ...">' HTML fragment.
    '''
    # never use mutable default arguments: a shared {} would leak state
    # between requests
    get_data = {} if get_data is None else get_data
    post_data = {} if post_data is None else post_data

    # generate / derive queries to be run
    queries, msg = [], ''
    if form_type == 'tbl_edit_form':
        queries, msg = sql.alter_table(conn_params['dialect'], get_data, post_data)
    elif form_type == 'tbl_vacuum_form':
        queries = sql.pg_vacuum_stmt(get_data, post_data)
    else:
        # implemented queries: analyze table and reindex table
        # only the pg dialect
        queries = sql.generate_query(form_type, conn_params['dialect'], get_data)

    # execute queries; ALTER TABLE runs inside a transaction, the
    # maintenance commands (VACUUM et al.) must run outside one
    if form_type == 'tbl_edit_form':
        ret = sa.short_query(conn_params, queries)
    else:
        ret = sa.execute_outside_transaction(conn_params, queries)

    if ret['status'] == 'success':
        if form_type == 'tbl_edit_form':
            ret['msg'] = msg + ' change succesfull'
        else:
            msg = '{command} operation ran succesfully'
            # map the form type to the SQL command it ran; fall back to
            # the raw form_type so an unknown value can't raise
            # NameError ('com' was previously unbound in that case)
            if form_type == 'tbl_vacuum_form':
                com = 'VACUUM'
            elif form_type == 'analyze_table':
                com = 'ANALYZE'
            elif form_type == 'reindex_table':
                com = 'REINDEX'
            else:
                com = form_type
            ret['msg'] = msg.format(command=com)

    # wrap the message for direct display in the UI
    ret['msg'] = '<p class="query-msg {status}">{msg}</p>'.format(
        status='error' if ret['status'] == 'error' else 'success',
        msg=ret['msg'])

    return ret
def _build_where_clause(post_data):
    '''Build the SQL WHERE expression "k1=v1 AND k2=v2 ..." from every
    key/value pair in post_data. Values are stripped and quoted with
    fns.quote; NOTE(review): the column names are interpolated as-is,
    which is only safe for trusted input — confirm upstream validation.
    '''
    pairs = []
    for key, val in post_data.items():
        pairs.append(key.strip() + "=" + fns.quote(val.strip()))
    return ' AND '.join(pairs)


def _pg_column_type(row):
    '''Compose the displayable postgresql column type declaration
    (e.g. "character varying(30)", "timestamp(2) with time zone") from
    a raw information_schema-style row where row[1] is data_type,
    row[4] is character_maximum_length, row[5]/row[6] are numeric
    precision/scale and row[7] is datetime_precision.

    Returns the list of string fragments to be joined by the caller.
    '''
    data_type_str = row[1]
    _l = [data_type_str]
    datetime_precision = row[7]
    if data_type_str in ('bit', 'bit varying', 'character varying',
            'character') and type(row[4]) is int:
        _l.append('({0})'.format(row[4]))
    elif data_type_str in ('numeric', 'decimal'):
        numeric_precision, numeric_scale = row[5], row[6]
        _l.append('(%d,%d)' % (numeric_precision, numeric_scale))
    # interval types should need some revising
    elif data_type_str in ('interval',):
        _l.append('(%d)' % datetime_precision)
    # time and timestamps have a somewhat different declarative syntax:
    # the precision goes between the base name and any
    # "with/without time zone" suffix.
    # bugfix: the original tested `datetime_precision is int`, which
    # compares a value to the type object and is always False, so the
    # precision was never rendered; use type(...) is int as the
    # char-length check above does.
    elif type(datetime_precision) is int and data_type_str.startswith('time'):
        # the length of 'timestamp' and 'time' respectively
        _in = 9 if data_type_str.startswith('timestamp') else 4
        _l = [data_type_str[:_in], '(%d)' % datetime_precision,
              data_type_str[_in:]]
    return _l


def rpr_query(conn_params, query_type, get_data=None, post_data=None):
    '''
    Run queries that have to be generated on the fly.

    Most queries depend on get_data, while some few depend on
    post_data. get_data and post_data are gotten from request.GET and
    request.POST or form.cleaned_data.

    some functions in this file should have been (and previously have
    been) in this function. but the contents of a function are not
    indexed by IDEs and that makes debugging difficult

    Returns either an HttpResponse (for the no-return queries), a query
    result dict from sa.full_query, or an fns.http_500 error response.
    '''
    # never share a mutable {} default between calls
    get_data = {} if get_data is None else get_data
    post_data = {} if post_data is None else post_data

    # common queries that return only a success-state dict
    no_return_queries = ('create_user', 'drop_user', 'create_db',
        'create_table', 'drop_table', 'empty_table', 'delete_row',
        'create_column', 'drop_column', 'drop_db', 'drop_sequence',
        'reset_sequence', 'drop_constraint',)
    # queries that must run outside a transaction block on postgresql
    psycopg2_queries = ('drop_db',)

    if query_type in no_return_queries:
        if 'db' in get_data:
            conn_params['db'] = get_data['db']
        query_data = {}
        query_data.update(get_data, **post_data)
        queries = sql.generate_query(
            query_type, conn_params['dialect'], query_data)
        if conn_params['dialect'] == 'postgresql' \
                and query_type in psycopg2_queries:
            # these queries need to be run outside a transaction block;
            # the SA execute functions run all their queries inside one
            result = sa.execute_outside_transaction(conn_params, queries)
        else:
            result = sa.short_query(conn_params, queries)
        return HttpResponse(json.dumps(result))

    # specific queries with implementations similar to both dialects
    elif query_type in ('indexes', 'primary_keys', 'foreign_key_relation'):
        if conn_params['dialect'] == 'postgresql' and query_type == 'indexes':
            return get_constraints(conn_params, query_type, get_data)
        r = sa.full_query(conn_params,
            sql.generate_query(query_type, conn_params['dialect'],
                get_data)[0])
        return r

    elif query_type in ('get_single_row',):
        sub_q_data = {'tbl': get_data['tbl'], 'db': get_data['db']}
        if 'schm' in get_data:
            sub_q_data['schm'] = get_data['schm']
        # generate where statement from the posted key/value pairs
        sub_q_data['where'] = _build_where_clause(post_data)
        # retrieve and run queries
        conn_params['db'] = get_data['db']
        q = sql.generate_query(query_type, conn_params['dialect'], sub_q_data)
        r = sa.full_query(conn_params, q[0])
        return r

    elif query_type in ('table_rpr', 'table_structure',
            'raw_table_structure', 'seqs_rpr'):
        sub_q_data = {'db': get_data['db']}
        for key in ('tbl', 'schm'):
            if key in get_data:
                sub_q_data[key] = get_data[key]
        # make query: on pg, 'raw_table_structure' shares the
        # 'table_structure' query and differs only in post-processing
        if conn_params['dialect'] == 'postgresql' \
                and query_type == 'raw_table_structure':
            q = 'table_structure'
        else:
            q = query_type
        r = sa.full_query(conn_params,
            sql.generate_query(q, conn_params['dialect'], sub_q_data)[0])

        # further needed processing: compose the full column type
        # declaration for every row of a pg table-structure result
        if conn_params['dialect'] == 'postgresql' \
                and 'table_structure' in query_type:
            rwz = []
            for tuple_row in r['rows']:
                row = list(tuple_row)
                _l = _pg_column_type(row)
                if query_type == 'table_structure':
                    rwz.append([row[0], "".join(_l), row[2], row[3]])
                elif query_type == 'raw_table_structure':
                    row.append("".join(_l))
                    rwz.append(row)
            # change r['rows']
            r['rows'] = rwz
            # change r['columns']
            if query_type == 'table_structure':
                r['columns'] = [r['columns'][0], r['columns'][1],
                    r['columns'][2], r['columns'][3]]
            elif query_type == 'raw_table_structure':
                r['columns'].append('column_type')
        return r

    # queries with dissimilar implementations
    elif conn_params['dialect'] == 'mysql':
        if query_type == 'describe_databases':
            conn_params['db'] = 'INFORMATION_SCHEMA'
            query = sql.stored_query(query_type, conn_params['dialect'])
            return sa.full_query(conn_params, query)
        else:
            return fns.http_500('query not yet implemented!')
    else:
        return fns.http_500('dialect not supported!')