def update_cursors_in_meas_tab(conn, table_name, data_items):
    statement = ""
    for i in range(len(data_items)):
        statement += "UPDATE {} SET write_cursor = {} WHERE id = {}; ".format(
            table_name, data_items[i].data_buffer.cursor, i + 1)
    execute_statement(conn, statement)
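# Hedged usage sketch (not part of the module): update_cursors_in_meas_tab expects one
# data item per parameter row, ordered so that item i belongs to row id i + 1, with each
# item exposing data_buffer.cursor. The _DummyBuffer/_DummyItem classes and the table
# name below are hypothetical stand-ins for the caller's real objects.
def _example_update_cursors(conn):
    class _DummyBuffer:
        def __init__(self, cursor):
            self.cursor = cursor

    class _DummyItem:
        def __init__(self, cursor):
            self.data_buffer = _DummyBuffer(cursor)

    items = [_DummyItem(120), _DummyItem(80)]            # cursors of two parameters
    update_cursors_in_meas_tab(conn, '_12345_raw_data', items)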
def remove_variable(conn, variable_name):
    statement_1 = sql.SQL("DELETE FROM {} WHERE {} = {} RETURNING name").format(
        sql.SQL(var_sql_queries.gen_table_overview_name()),
        sql.Identifier('name'), sql.Literal(variable_name))
    statement_2 = sql.SQL("ALTER TABLE {} DROP COLUMN IF EXISTS {}").format(
        sql.SQL(var_sql_queries.gen_table_content_name()),
        sql.Identifier(variable_name))

    res = execute_query(conn, statement_1)
    execute_statement(conn, statement_2)

    if len(res) == 0:
        print('Nothing to remove. {} is not present in the database?'.format(variable_name))
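# Hedged usage sketch (not called by the module): removing a variable deletes its row
# from the overview table and drops its column from the content table. 'gate_B1' is an
# example name; `conn` is assumed to be an open psycopg2 connection.
def _example_remove_variable(conn):
    remove_variable(conn, 'gate_B1')
    # unlike init_table and change_column_name, remove_variable does not commit itself
    conn.commit()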
def generate_table(conn):
    statement = "CREATE TABLE IF NOT EXISTS {} (".format(sample_info_queries.table_name)
    statement += "sample_info_hash text NOT NULL UNIQUE,"
    statement += "set_up text NOT NULL,"
    statement += "project text NOT NULL,"
    statement += "sample text NOT NULL );"
    execute_statement(conn, statement)
def init_table(conn):
    statement = "CREATE TABLE IF NOT EXISTS {} (".format(
        var_sql_queries.gen_table_overview_name())
    statement += "name text NOT NULL UNIQUE,"
    statement += "unit text NOT NULL,"
    statement += "step FLOAT8 NOT NULL,"
    statement += "category text NOT NULL );"
    execute_statement(conn, statement)

    statement = "CREATE TABLE IF NOT EXISTS {} (id SERIAL, insert_time TIMESTAMP );".format(
        var_sql_queries.gen_table_content_name())
    execute_statement(conn, statement)
    conn.commit()
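# Hedged setup sketch (not part of the module): how the variable tables could be created
# on a fresh database. The connection parameters are placeholders, not the package's
# actual defaults.
def _example_init_variable_tables():
    import psycopg2
    conn = psycopg2.connect(dbname='measurements', user='postgres')  # placeholder DSN
    init_table(conn)   # creates the overview and content tables if they do not exist yet
    conn.close()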
def _sync_raw_data_table(sync_agent, raw_data_table_name):
    # compare row counts between the local and remote copies of the raw data table
    n_row_loc = select_elements_in_table(sync_agent.conn_local, raw_data_table_name,
                                         (psycopg2.sql.SQL('COUNT(*)'), ),
                                         dict_cursor=False)[0][0]
    table_name = execute_query(
        sync_agent.conn_remote,
        "SELECT to_regclass('{}.{}');".format('public', raw_data_table_name))[0][0]

    n_row_rem = 0
    if table_name is not None:
        n_row_rem = select_elements_in_table(sync_agent.conn_remote, raw_data_table_name,
                                             (psycopg2.sql.SQL('COUNT(*)'), ),
                                             dict_cursor=False)[0][0]

    if n_row_loc != n_row_rem or table_name is None:
        # out of sync or missing remotely: rebuild the remote table from the local rows
        get_rid_of_table = "DROP TABLE IF EXISTS {} ; ".format(raw_data_table_name)
        execute_statement(sync_agent.conn_remote, get_rid_of_table)
        data_table_queries.generate_table(sync_agent.conn_remote, raw_data_table_name)

        res_loc = select_elements_in_table(sync_agent.conn_local, raw_data_table_name,
                                           ('*', ), order_by=('id', ''))
        for result in res_loc:
            # allocate a fresh large object on the remote server to hold the raw data
            lobject = sync_agent.conn_remote.lobject(0, 'w')
            del result['id']
            result['oid'] = lobject.oid
            result['write_cursor'] = 0
            result['depencies'] = json.dumps(result['depencies'])
            result['shape'] = json.dumps(result['shape'])
            insert_row_in_table(sync_agent.conn_remote, raw_data_table_name,
                                result.keys(), result.values())

    sync_agent.conn_remote.commit()
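# Hedged invocation sketch: _sync_raw_data_table only uses the conn_local and conn_remote
# attributes of sync_agent, so any object exposing two open psycopg2 connections works for
# illustration. _ConnPair, both DSNs and the table name are hypothetical; the real sync
# agent class lives elsewhere in the package.
def _example_sync_raw_data():
    import psycopg2

    class _ConnPair:
        def __init__(self, conn_local, conn_remote):
            self.conn_local = conn_local
            self.conn_remote = conn_remote

    agent = _ConnPair(
        psycopg2.connect(dbname='local_db', user='postgres'),                          # placeholder
        psycopg2.connect(dbname='remote_db', user='postgres', host='db.example.org'))  # placeholder
    _sync_raw_data_table(agent, '_12345_raw_data')   # example raw data table name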
def generate_table(conn):
    statement = "CREATE TABLE IF NOT EXISTS {} (".format(
        measurement_overview_queries.table_name)
    statement += "id SERIAL,"
    statement += "uuid BIGINT NOT NULL UNIQUE,"
    statement += "exp_name text NOT NULL,"
    statement += "set_up text NOT NULL,"
    statement += "project text NOT NULL,"
    statement += "sample text NOT NULL,"
    statement += "creasted_by text NOT NULL,"
    statement += "start_time TIMESTAMP, "
    statement += "stop_time TIMESTAMP, "
    statement += "exp_data_location text,"
    statement += "snapshot BYTEA, "
    statement += "metadata BYTEA,"
    statement += "keywords JSONB, "
    statement += "starred BOOL DEFAULT False, "
    statement += "completed BOOL DEFAULT False, "
    statement += "data_size int,"
    statement += "data_cleared BOOL DEFAULT False, "
    statement += "data_synchronized BOOL DEFAULT False,"
    statement += "table_synchronized BOOL DEFAULT False,"
    statement += "sync_location text); "
    statement += "CREATE INDEX IF NOT EXISTS uuid_indexed ON {} USING BTREE (uuid) ;".format(
        measurement_overview_queries.table_name)
    statement += "CREATE INDEX IF NOT EXISTS starred_indexed ON {} USING BTREE (starred) ;".format(
        measurement_overview_queries.table_name)
    statement += "CREATE INDEX IF NOT EXISTS date_day_index ON {} USING BTREE (project, set_up, sample) ;".format(
        measurement_overview_queries.table_name)
    statement += "CREATE INDEX IF NOT EXISTS data_synced_index ON {} USING BTREE (data_synchronized);".format(
        measurement_overview_queries.table_name)
    statement += "CREATE INDEX IF NOT EXISTS table_synced_index ON {} USING BTREE (table_synchronized);".format(
        measurement_overview_queries.table_name)
    execute_statement(conn, statement)
def generate_table(conn, table_name):
    statement = "CREATE TABLE IF NOT EXISTS {} ( ".format(table_name)
    statement += "id SERIAL PRIMARY KEY, "
    statement += "param_id BIGINT, "
    statement += "nth_set INT, "
    statement += "nth_dim INT, "
    statement += "param_id_m_param BIGINT, "
    statement += "setpoint BOOL, "
    statement += "setpoint_local BOOL, "
    statement += "name_gobal text, "
    statement += "name text NOT NULL,"
    statement += "label text NOT NULL,"
    statement += "unit text NOT NULL,"
    statement += "depencies jsonb, "
    statement += "shape jsonb, "
    statement += "write_cursor INT, "
    statement += "total_size INT, "
    statement += "oid INT, "
    statement += "synchronized BOOL DEFAULT False,"
    statement += "sync_location text);"
    execute_statement(conn, statement)
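# Hedged sketch: the oid column above is assumed to point at a PostgreSQL large object
# that holds the actual measurement samples (see the lobject calls in the sync routine).
# This shows only the bare large-object round trip with psycopg2, not the package's real
# write path.
def _example_large_object_roundtrip(conn):
    lob = conn.lobject(0, 'wb')      # create a new large object in binary mode
    oid = lob.oid                    # this value would be stored in the oid column
    lob.write(b'\x00' * 8)           # raw bytes, e.g. one packed double
    lob.close()
    conn.commit()

    lob = conn.lobject(oid, 'rb')    # reopen by oid and read the data back
    data = lob.read()
    lob.close()
    return data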
def change_column_name(conn, old, new):
    statement = sql.SQL('ALTER TABLE {} RENAME COLUMN {} TO {};').format(
        sql.SQL(var_sql_queries.gen_table_content_name()),
        sql.Identifier(old), sql.Identifier(new))
    execute_statement(conn, statement)
    conn.commit()