def sync_table(sync_agent, uuid):
    ''' syncs row in the table to the remote for the given uuid

    Args:
        sync_agent : object holding the local (`conn_local`) and remote
            (`conn_remote`) database connections
        uuid : unique id of the measurement row to push to the remote
    '''
    # does the remote already know this uuid?
    entry_exists = select_elements_in_table(sync_agent.conn_remote,
                        "global_measurement_overview", ('uuid', ),
                        where=("uuid", uuid), dict_cursor=False)

    local_content = select_elements_in_table(sync_agent.conn_local,
                        "global_measurement_overview", ('*', ),
                        where=("uuid", uuid), dict_cursor=True)[0]
    sync_mgr_queries.convert_SQL_raw_table_entry_to_python(local_content)

    # 'id' is a per-server serial; never copy it across connections
    del local_content['id']
    local_content['table_synchronized'] = True

    # BUG FIX: was `len(entry_exists) == False` -- that only worked because
    # False == 0 in Python; compare the length to 0 explicitly.
    if len(entry_exists) == 0:
        insert_row_in_table(sync_agent.conn_remote,
                            'global_measurement_overview',
                            tuple(local_content.keys()),
                            tuple(local_content.values()))
    else:
        # row already present remotely -- push only the fields that differ
        remote_content = select_elements_in_table(sync_agent.conn_remote,
                            "global_measurement_overview", ('*', ),
                            where=("uuid", uuid), dict_cursor=True)[0]
        sync_mgr_queries.convert_SQL_raw_table_entry_to_python(remote_content)
        del remote_content['id']

        content_to_update = dict()
        for key in remote_content.keys():
            if local_content[key] != remote_content[key]:
                content_to_update[key] = local_content[key]

        update_table(sync_agent.conn_remote, 'global_measurement_overview',
                     content_to_update.keys(), content_to_update.values(),
                     condition=("uuid", uuid))

    # mark the local row as synchronized
    update_table(sync_agent.conn_local, 'global_measurement_overview',
                 ('table_synchronized', ), (True, ),
                 condition=("uuid", uuid))

    sync_agent.conn_local.commit()
    sync_agent.conn_remote.commit()
def add_sample(conn):
    '''register the current sample (taken from `sample_info`) in the
    sample-info table.

    Skips silently when any of sample / set_up / project is unset; a
    duplicate registration is ignored via ON CONFLICT DO NOTHING.
    '''
    current = (sample_info.sample, sample_info.set_up, sample_info.project)
    if any(field is None for field in current):
        return

    sample, set_up, project = current
    insert_row_in_table(conn, sample_info_queries.table_name,
                        ('sample_info_hash', 'sample', 'set_up', 'project'),
                        (set_up + project + sample, sample, set_up, project),
                        custom_statement='ON CONFLICT DO NOTHING')
def add_variable(conn, name, unit, category, step, value=0):
    '''add a new variable to the variable tables.

    A fresh values row is written first (unchanged values) so its id can be
    used to set the initial value of the new column. When the variable is
    already registered, nothing is written.

    Args:
        conn : database connection
        name (str) : name of the variable
        unit (str) : unit of the variable
        category (str) : category of the variable
        step : step size for the variable
        value : initial value of the new column (default 0)
    '''
    # this will be the line where we set the value
    _, last_update_id = var_sql_queries.update_val(conn, name=None, value=None)

    existing = select_elements_in_table(conn,
                    var_sql_queries.gen_table_overview_name(),
                    ('name', ), where=('name', name))
    if len(existing) != 0:
        print('Variable {} already present, skipping.'.format(name))
        return

    insert_row_in_table(conn, var_sql_queries.gen_table_overview_name(),
                        ('name', 'unit', 'category', 'step'),
                        (name, unit, category, step))
    alter_table(conn, var_sql_queries.gen_table_content_name(),
                (name, ), ('FLOAT8',))
    update_table(conn, var_sql_queries.gen_table_content_name(),
                 (name,), (value,), condition=('id', last_update_id))
    conn.commit()
def new_measurement(conn, exp_name):
    ''' insert new measurement in the measurement table

    Args:
        exp_name (str) : name of the experiment to be executed

    Returns:
        id, uuid, SQL_datatable : id and uuid of the new measurement and
            the tablename for raw data storage
    '''
    uuid = generate_uuid()

    # NOTE(review): 'creasted_by' looks like a typo for 'created_by', but it
    # must match the column name in the database schema -- confirm before renaming.
    columns = ('uuid', 'set_up', 'project', 'sample', 'creasted_by', 'exp_name')
    values = (uuid, str(sample_info.set_up), str(sample_info.project),
              str(sample_info.sample), SQL_conn_info_local.user, exp_name)
    outcome = insert_row_in_table(conn, measurement_overview_queries.table_name,
                                  columns, values, ('id', 'uuid'))

    meas_id = outcome[0][0]
    meas_uuid = outcome[0][1]
    # raw-data table name: identifiers must not contain spaces or dashes
    raw_name = ("_" + sample_info.set_up + "_" + sample_info.project + "_"
                + sample_info.sample + "_" + str(meas_uuid))
    SQL_datatable = raw_name.replace(" ", "_").replace('-', '_')

    return meas_id, meas_uuid, SQL_datatable
def _sync_raw_data_table(sync_agent, raw_data_table_name):
    '''sync a raw-data table from the local to the remote server.

    If the remote table is missing, or its row count differs from the local
    one, the remote table is dropped and fully rebuilt from the local rows.

    Args:
        sync_agent : object holding the local (`conn_local`) and remote
            (`conn_remote`) database connections
        raw_data_table_name (str) : name of the raw-data table to sync
    '''
    n_row_loc = select_elements_in_table(sync_agent.conn_local,
                    raw_data_table_name,
                    (psycopg2.sql.SQL('COUNT(*)'), ),
                    dict_cursor=False)[0][0]

    # to_regclass returns NULL when the table does not exist on the remote
    table_name = execute_query(sync_agent.conn_remote,
        "SELECT to_regclass('{}.{}');".format('public', raw_data_table_name))[0][0]

    n_row_rem = 0
    if table_name is not None:
        n_row_rem = select_elements_in_table(sync_agent.conn_remote,
                        raw_data_table_name,
                        (psycopg2.sql.SQL('COUNT(*)'), ),
                        dict_cursor=False)[0][0]

    # idiom fix: was `table_name == None`; identity check is the correct form
    if n_row_loc != n_row_rem or table_name is None:
        # counts disagree (or the table is absent) -> rebuild from scratch
        get_rid_of_table = "DROP TABLE IF EXISTS {} ; ".format(
            raw_data_table_name)
        execute_statement(sync_agent.conn_remote, get_rid_of_table)
        data_table_queries.generate_table(sync_agent.conn_remote,
                                          raw_data_table_name)

        res_loc = select_elements_in_table(sync_agent.conn_local,
                        raw_data_table_name, ('*', ), order_by=('id', ''))

        for result in res_loc:
            # every remote row gets its own fresh large object for the data blob
            lobject = sync_agent.conn_remote.lobject(0, 'w')
            del result['id']  # serial id is per-server; let the remote assign it
            result['oid'] = lobject.oid
            result['write_cursor'] = 0
            # NOTE(review): 'depencies' is the (misspelled) column name used
            # throughout this schema -- keep the spelling as-is.
            result['depencies'] = json.dumps(result['depencies'])
            result['shape'] = json.dumps(result['shape'])
            insert_row_in_table(sync_agent.conn_remote, raw_data_table_name,
                                result.keys(), result.values())
        sync_agent.conn_remote.commit()
def insert_measurement_spec_in_meas_table(conn, table_name, data_item):
    '''
    insert all the info of the set and get parameters in the measurement table.

    Args:
        conn : database connection to write to
        table_name (str) : name of the measurement table
        data_item (m_param_raw) : raw format of the measurement parameter
    '''
    # NOTE(review): 'name_gobal' and 'depencies' appear to be misspellings
    # (name_global / dependencies), but they must match the database column
    # names used elsewhere in this schema -- do not "fix" them here.
    var_names = ("param_id", "nth_set", "nth_dim",
                 "param_id_m_param", "setpoint", "setpoint_local",
                 "name_gobal", "name", "label", "unit",
                 "depencies", "shape", "write_cursor", "total_size", "oid")
    # write_cursor starts at 0; dependency and shape are serialized as JSON
    var_values = (data_item.param_id, data_item.nth_set, data_item.nth_dim,
                  data_item.param_id_m_param, data_item.setpoint,
                  data_item.setpoint_local, data_item.name_gobal,
                  data_item.name, data_item.label, data_item.unit,
                  psycopg2.extras.Json(data_item.dependency),
                  psycopg2.extras.Json(data_item.shape),
                  0, data_item.size, data_item.oid)
    insert_row_in_table(conn, table_name, var_names, var_values)
def update_val(conn, name, value):
    '''write a new values row, optionally overriding one variable.

    Reads the latest values, sets `name` to `value` (when `name` is given),
    stamps the insert time and appends the result as a fresh row.

    Args:
        conn : database connection
        name (str or None) : variable to update; None re-writes current values
        value : new value for `name` (ignored when `name` is None)

    Returns:
        (dict, id) : the values that were written and the id of the new row
    '''
    all_vals = var_sql_queries.get_all_values(conn)
    # BUG FIX: the None-guard must run BEFORE the item assignment -- the
    # original assigned all_vals[name] first, which raised TypeError when
    # get_all_values returned None and a name was provided.
    if all_vals is None:
        all_vals = dict()
    if name is not None:
        all_vals[name] = value

    all_vals.pop('id', None)  # id is assigned by the database, never reused
    all_vals['insert_time'] = to_postgres_time(datetime.datetime.now())
    my_id = insert_row_in_table(conn, var_sql_queries.gen_table_content_name(),
                                tuple(all_vals.keys()),
                                tuple(all_vals.values()),
                                returning=('id', ))[0]
    conn.commit()
    return all_vals, my_id
def sync_table(sync_agent, uuid, to_local=False):
    ''' syncs row in the table to the remote for the given uuid

    Args:
        sync_agent: class holding local and remote connection
        uuid (int): unique id of measurement
        to_local (bool): if True syncs from remote to local server
    '''
    # pick the direction of the sync
    conn_src = sync_agent.conn_remote if to_local else sync_agent.conn_local
    conn_dest = sync_agent.conn_local if to_local else sync_agent.conn_remote

    # does the destination already know this uuid?
    entry_exists = select_elements_in_table(conn_dest,
                        "global_measurement_overview", ('uuid', ),
                        where=("uuid", uuid), dict_cursor=False)

    source_content = select_elements_in_table(conn_src,
                        "global_measurement_overview", ('*', ),
                        where=("uuid", uuid), dict_cursor=True)[0]
    sync_mgr_queries.convert_SQL_raw_table_entry_to_python(source_content)

    # the serial id is per-server; never copy it between connections
    del source_content['id']
    source_content['table_synchronized'] = True

    if not entry_exists:
        print('create measurement row', uuid)
        insert_row_in_table(conn_dest, 'global_measurement_overview',
                            tuple(source_content.keys()),
                            tuple(source_content.values()))
    else:
        print('update measurement row', uuid)
        dest_content = select_elements_in_table(conn_dest,
                            "global_measurement_overview", ('*', ),
                            where=("uuid", uuid), dict_cursor=True)[0]
        sync_mgr_queries.convert_SQL_raw_table_entry_to_python(dest_content)
        del dest_content['id']

        # only push fields whose values actually differ
        content_to_update = {key: source_content[key]
                             for key in dest_content
                             if source_content[key] != dest_content[key]}

        update_table(conn_dest, 'global_measurement_overview',
                     content_to_update.keys(), content_to_update.values(),
                     condition=("uuid", uuid))

    if not to_local:
        # a push to the remote marks the local row as synchronized
        update_table(sync_agent.conn_local, 'global_measurement_overview',
                     ('table_synchronized', ), (True, ),
                     condition=("uuid", uuid))

    conn_src.commit()
    conn_dest.commit()