Example #1
    def sync_raw_data(sync_agent, uuid, to_local=False):
        if to_local:
            conn_src = sync_agent.conn_remote
            conn_dest = sync_agent.conn_local
        else:
            conn_src = sync_agent.conn_local
            conn_dest = sync_agent.conn_remote

        raw_data_table_name = select_elements_in_table(
            conn_src,
            'global_measurement_overview', ('exp_data_location', ),
            where=("uuid", uuid),
            dict_cursor=False)[0][0]

        #        data_table_queries.generate_table(sync_agent.conn_local, raw_data_table_name)
        sync_mgr_queries._sync_raw_data_table(conn_src, conn_dest,
                                              raw_data_table_name)
        sync_mgr_queries._sync_raw_data_lobj(conn_src, conn_dest,
                                             raw_data_table_name)

        update_table(sync_agent.conn_local,
                     'global_measurement_overview', ('data_synchronized', ),
                     (True, ),
                     condition=("uuid", uuid))
        sync_agent.conn_local.commit()
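Every example in this listing drives the same update_table helper with the call pattern update_table(conn, table_name, column_names, values, condition=(column, value)). The sketch below is a minimal stand-in inferred from these call sites only; it is not the actual core_tools implementation, and the real helper's signature and behaviour may differ.

    # Minimal stand-in for update_table, inferred from the call sites in this listing.
    # Not the core_tools implementation; shown only to illustrate the query being built.
    from psycopg2 import sql

    def update_table(conn, table_name, var_names, var_values, condition=None):
        assignments = []
        params = []
        for name, value in zip(var_names, var_values):
            if isinstance(value, sql.Composable):
                # values such as psycopg2.sql.SQL("TO_TIMESTAMP(...)") (see example #4)
                # are spliced directly into the statement
                assignments.append(sql.SQL('{} = {}').format(sql.Identifier(name), value))
            else:
                assignments.append(sql.SQL('{} = {}').format(sql.Identifier(name), sql.Placeholder()))
                params.append(value)

        query = sql.SQL('UPDATE {} SET {}').format(
            sql.Identifier(table_name), sql.SQL(', ').join(assignments))
        if condition is not None:
            query = query + sql.SQL(' WHERE {} = {}').format(
                sql.Identifier(condition[0]), sql.Placeholder())
            params.append(condition[1])

        with conn.cursor() as cur:
            cur.execute(query, params)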
Example #2
 def star_measurement(uuid, state):
     conn = SQL_database_manager().conn_local
     update_table(conn,
                  'global_measurement_overview',
                  ('starred', 'table_synchronized'), (state, False),
                  condition=('uuid', uuid))
     conn.commit()
Example #3
 def update_name(uuid, name):
     conn = SQL_database_manager().conn_local
     update_table(conn,
                  'global_measurement_overview',
                  ('exp_name', 'table_synchronized'), (name, False),
                  condition=('uuid', uuid))
     conn.commit()
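Examples #2 and #3 follow the same pattern: write the new field value and clear table_synchronized in the same update_table call, presumably so that the synchronization code shown later re-uploads the row. Hypothetical calls, with a made-up uuid:

    # Hypothetical usage; the uuid is a made-up illustration value.
    star_measurement(1618238123456, True)       # mark the measurement as starred
    update_name(1618238123456, 'rabi_scan_B1')  # rename the measurement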
Example #4
	def update_measurement(conn, meas_uuid, meas_table_name=None, start_time=None, stop_time=None,
			metadata=None, snapshot=None, keywords=None, data_size=None, data_synchronized=False, completed=False):
		'''
		fill in the additional data in a record of the measurements overview table.

		Args:
			conn : connection to the database
			meas_uuid (int) : record that needs to be updated
			meas_table_name (str) : name of the table that contains the raw measurement data
			start_time (float) : time in unix seconds since the epoch
			stop_time (float) : time in unix seconds since the epoch
			metadata (dict) : dictionary that is saved as a JSON string in the database
			snapshot (dict) : snapshot of the experimental set up
			keywords (list) : keywords describing the measurement
			data_size (int) : size of the measurement data
			data_synchronized (bool) : whether the raw data has been synchronized
			completed (bool) : whether the measurement is completed
		'''
		var_names = ['exp_data_location','metadata', 'snapshot', 'keywords', 'data_size', 'data_synchronized', 'completed']
		var_values = [meas_table_name, psycopg2.Binary(str(json.dumps(metadata)).encode('ascii')),
			psycopg2.Binary(str(json.dumps(snapshot)).encode('ascii')), psycopg2.extras.Json(keywords),
			data_size, str(data_synchronized), str(completed) ]

		if start_time is not None:
			var_names += ['start_time']
			var_values += [psycopg2.sql.SQL("TO_TIMESTAMP({})").format(psycopg2.sql.Literal(start_time))]
		if stop_time is not None:
			var_names += ['stop_time']
			var_values += [psycopg2.sql.SQL("TO_TIMESTAMP({})").format(psycopg2.sql.Literal(stop_time))]

		condition = ('uuid', meas_uuid)
		update_table(conn, measurement_overview_queries.table_name, var_names, var_values, condition)
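A call to update_measurement, as sketched from the signature above, might look as follows. The uuid, table name and sizes are made-up illustration values, and committing is left to the caller since update_measurement itself does not commit.

    import time

    # Hypothetical call with made-up values, to illustrate the argument types.
    update_measurement(conn, meas_uuid=1618238123456,
                       meas_table_name='_1618238123456_raw',
                       start_time=time.time(),
                       metadata={'sample': 'dev_A'},
                       snapshot={'station': {}},
                       keywords=['1D', 'test'],
                       data_size=1000,
                       data_synchronized=False,
                       completed=True)
    conn.commit()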
Example #5
    def sync_table(sync_agent, uuid):
        '''
        syncs row in the table to the remote for the given uuid
        '''
        # check if uuid exists
        entry_exists = select_elements_in_table(sync_agent.conn_remote,
                                                "global_measurement_overview",
                                                ('uuid', ),
                                                where=("uuid", uuid),
                                                dict_cursor=False)

        local_content = select_elements_in_table(sync_agent.conn_local,
                                                 "global_measurement_overview",
                                                 ('*', ),
                                                 where=("uuid", uuid),
                                                 dict_cursor=True)[0]
        sync_mgr_queries.convert_SQL_raw_table_entry_to_python(local_content)

        del local_content['id']
        local_content['table_synchronized'] = True

        if len(entry_exists) == 0:
            insert_row_in_table(sync_agent.conn_remote,
                                'global_measurement_overview',
                                tuple(local_content.keys()),
                                tuple(local_content.values()))
        else:
            remote_content = select_elements_in_table(
                sync_agent.conn_remote,
                "global_measurement_overview", ('*', ),
                where=("uuid", uuid),
                dict_cursor=True)[0]
            sync_mgr_queries.convert_SQL_raw_table_entry_to_python(
                remote_content)

            del remote_content['id']

            content_to_update = dict()

            for key in remote_content.keys():
                if local_content[key] != remote_content[key]:
                    content_to_update[key] = local_content[key]

            update_table(sync_agent.conn_remote,
                         'global_measurement_overview',
                         content_to_update.keys(),
                         content_to_update.values(),
                         condition=("uuid", uuid))

        update_table(sync_agent.conn_local,
                     'global_measurement_overview', ('table_synchronized', ),
                     (True, ),
                     condition=("uuid", uuid))

        sync_agent.conn_local.commit()
        sync_agent.conn_remote.commit()
Example #6
    def add_variable(conn, name, unit, category, step, value=0):
        # this will be the line where we set the value
        vals, last_update_id = var_sql_queries.update_val(conn, name=None, value=None)
        res = select_elements_in_table(conn, var_sql_queries.gen_table_overview_name(), ('name', ), where=('name', name))

        if len(res) == 0:
            insert_row_in_table(conn,  var_sql_queries.gen_table_overview_name(),  ('name', 'unit', 'category', 'step'), (name, unit, category, step))
            alter_table(conn, var_sql_queries.gen_table_content_name(), (name, ), ('FLOAT8',))

            update_table(conn, var_sql_queries.gen_table_content_name(), (name,), (value,), condition=('id', last_update_id))
            conn.commit()

        else: 
            print('Variable {} already present, skipping.'.format(name))
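A hypothetical call to add_variable, with made-up name, unit, category and step values:

    # Hypothetical usage; all argument values are for illustration only.
    add_variable(conn, name='B_field', unit='T', category='magnet', step=0.001, value=0.0)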
Example #7
    def sync_raw_data(sync_agent, uuid):
        raw_data_table_name = select_elements_in_table(
            sync_agent.conn_local,
            'global_measurement_overview', ('exp_data_location', ),
            where=("uuid", uuid),
            dict_cursor=False)[0][0]

        data_table_queries.generate_table(sync_agent.conn_local,
                                          raw_data_table_name)
        sync_mgr_queries._sync_raw_data_table(sync_agent, raw_data_table_name)

        update_table(sync_agent.conn_local,
                     'global_measurement_overview', ('data_synchronized', ),
                     (True, ),
                     condition=("uuid", uuid))
        sync_agent.conn_local.commit()

        sync_mgr_queries._sync_raw_data_lobj(sync_agent, raw_data_table_name)
Example #8
    def _sync_raw_data_lobj(conn_src, conn_dest, raw_data_table_name):
        res_src = select_elements_in_table(
            conn_src,
            raw_data_table_name, ('write_cursor', 'total_size', 'oid'),
            order_by=('id', ''))
        res_dest = select_elements_in_table(
            conn_dest,
            raw_data_table_name, ('write_cursor', 'total_size', 'oid'),
            order_by=('id', ''))

        print('update large object', raw_data_table_name)
        for i in range(len(res_src)):
            dest_cursor = res_dest[i]['write_cursor']
            src_cursor = res_src[i]['write_cursor']
            dest_oid = res_dest[i]['oid']
            src_oid = res_src[i]['oid']
            src_lobject = conn_src.lobject(src_oid, 'rb')
            dest_lobject = conn_dest.lobject(dest_oid, 'wb')

            while (dest_cursor != src_cursor):
                src_lobject.seek(dest_cursor * 8)
                dest_lobject.seek(dest_cursor * 8)
                if src_cursor * 8 - dest_cursor * 8 < 2_000_000:
                    mybuffer = np.frombuffer(
                        src_lobject.read(src_cursor * 8 - dest_cursor * 8))
                    dest_cursor = src_cursor
                else:
                    print(
                        f'large dataset, {(src_cursor*8-dest_cursor*8)*1e-9}GB'
                    )
                    mybuffer = np.frombuffer(src_lobject.read(2_000_000))
                    dest_cursor += int(2_000_000 / 8)
                dest_lobject.write(mybuffer.tobytes())

            dest_lobject.close()
            src_lobject.close()

            update_table(conn_dest,
                         raw_data_table_name, ('write_cursor', ),
                         (src_cursor, ),
                         condition=('oid', dest_oid))

        conn_dest.commit()
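In examples #8 and #9 the write_cursor columns count float64 elements of 8 bytes each, and every pass through the while loop copies at most 2_000_000 bytes (250_000 elements) from the source large object to the destination. The arithmetic, with made-up cursor values:

    # Worked example of the chunking arithmetic used above (made-up cursor values).
    CHUNK_BYTES = 2_000_000                 # per-iteration cap used in the loop
    src_cursor, dest_cursor = 1_000_000, 0  # cursors count 8-byte float64 elements
    remaining_bytes = (src_cursor - dest_cursor) * 8
    full_chunks, tail_bytes = divmod(remaining_bytes, CHUNK_BYTES)
    print(remaining_bytes, full_chunks, tail_bytes)  # 8000000 bytes -> 4 full chunks, 0 left over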
Example #9
    def _sync_raw_data_lobj(sync_agent, raw_data_table_name):
        res_loc = select_elements_in_table(
            sync_agent.conn_local,
            raw_data_table_name, ('write_cursor', 'total_size', 'oid'),
            order_by=('id', ''))
        res_rem = select_elements_in_table(
            sync_agent.conn_remote,
            raw_data_table_name, ('write_cursor', 'total_size', 'oid'),
            order_by=('id', ''))

        for i in range(len(res_loc)):
            r_cursor = res_rem[i]['write_cursor']
            l_cursor = res_loc[i]['write_cursor']
            r_oid = res_rem[i]['oid']
            l_oid = res_loc[i]['oid']
            l_lobject = sync_agent.conn_local.lobject(l_oid, 'rb')
            r_lobject = sync_agent.conn_remote.lobject(r_oid, 'wb')

            while (r_cursor != l_cursor):
                l_lobject.seek(r_cursor * 8)
                r_lobject.seek(r_cursor * 8)
                if l_cursor * 8 - r_cursor * 8 < 2_000_000:
                    mybuffer = np.frombuffer(
                        l_lobject.read(l_cursor * 8 - r_cursor * 8))
                    r_cursor = l_cursor
                else:
                    print(f'large dataset, {(l_cursor*8-r_cursor*8)*1e-9}GB')
                    mybuffer = np.frombuffer(l_lobject.read(2_000_000))
                    r_cursor += int(2_000_000 / 8)
                r_lobject.write(mybuffer.tobytes())

            r_lobject.close()
            l_lobject.close()

            update_table(sync_agent.conn_remote,
                         raw_data_table_name, ('write_cursor', ), (l_cursor, ),
                         condition=('oid', r_oid))

        sync_agent.conn_remote.commit()
Example #10
    def sync_table(sync_agent, uuid, to_local=False):
        '''
        syncs the row in the table for the given uuid (to the remote by default,
        or from the remote to the local server when to_local is True)

        Args:
            sync_agent: object holding the local and remote connections
            uuid (int): unique id of the measurement
            to_local (bool): if True, syncs from the remote to the local server
        '''
        if to_local:
            conn_src = sync_agent.conn_remote
            conn_dest = sync_agent.conn_local
        else:
            conn_src = sync_agent.conn_local
            conn_dest = sync_agent.conn_remote

        # check if uuid exists
        entry_exists = select_elements_in_table(conn_dest,
                                                "global_measurement_overview",
                                                ('uuid', ),
                                                where=("uuid", uuid),
                                                dict_cursor=False)

        source_content = select_elements_in_table(
            conn_src,
            "global_measurement_overview", ('*', ),
            where=("uuid", uuid),
            dict_cursor=True)[0]
        sync_mgr_queries.convert_SQL_raw_table_entry_to_python(source_content)

        del source_content['id']
        source_content['table_synchronized'] = True

        if len(entry_exists) == 0:
            print('create measurement row', uuid)
            insert_row_in_table(conn_dest, 'global_measurement_overview',
                                tuple(source_content.keys()),
                                tuple(source_content.values()))
        else:
            print('update measurement row', uuid)
            dest_content = select_elements_in_table(
                conn_dest,
                "global_measurement_overview", ('*', ),
                where=("uuid", uuid),
                dict_cursor=True)[0]
            sync_mgr_queries.convert_SQL_raw_table_entry_to_python(
                dest_content)

            del dest_content['id']

            content_to_update = dict()

            for key in dest_content.keys():
                if source_content[key] != dest_content[key]:
                    content_to_update[key] = source_content[key]

            update_table(conn_dest,
                         'global_measurement_overview',
                         content_to_update.keys(),
                         content_to_update.values(),
                         condition=("uuid", uuid))

        if not to_local:
            update_table(sync_agent.conn_local,
                         'global_measurement_overview',
                         ('table_synchronized', ), (True, ),
                         condition=("uuid", uuid))

        conn_src.commit()
        conn_dest.commit()
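Putting examples #10 and #1 together, a driver that pushes every not-yet-synchronized measurement to the remote could look like the sketch below. sync_agent and get_unsynced_uuids are assumptions made for the illustration and are not defined in the snippets above.

    # Hypothetical driver loop; sync_agent and get_unsynced_uuids are assumed helpers,
    # not part of the examples above.
    for uuid in get_unsynced_uuids(sync_agent.conn_local):
        sync_mgr_queries.sync_table(sync_agent, uuid)     # overview row (example #10)
        sync_mgr_queries.sync_raw_data(sync_agent, uuid)  # raw data table + large objects (example #1)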