Ejemplo n.º 1
0
def api_totals():
    """Return an HTML snippet with the total attempt and distinct-IP counts."""
    db, curs = database.connect()
    attempts = database.get_total_attempts(db, curs)
    ips = database.get_total_ips(db, curs)
    db.close()
    return "Total Attempts: {}<br>Total IPs: {}".format(attempts, ips)
Ejemplo n.º 2
0
def submit_trait():
    """Promote a voted trait entry from ontologi_vote into ontologi.

    Reads a vote-row id and an admin password from the POSTed form. On a
    password match, copies the voted words/traits into `ontologi` (unless the
    words already exist there) and deletes the vote row.
    """
    idx = request.form['traits']
    password = request.form['pass']
    # NOTE(review): plaintext hard-coded password check — consider hashing and
    # moving the secret out of source control.
    if password != "scbd2019jaya":
        # Bug fix: the original fell through here without returning a response.
        return redirect('view_submit_traits')

    db = connect()
    try:
        cur = db.cursor()
        # Parameterized queries prevent SQL injection from form input.
        # Assumes a 'format'-paramstyle DB-API driver (e.g. pymysql/MySQLdb);
        # use '?' placeholders for sqlite3 — TODO confirm driver.
        cur.execute("SELECT * FROM ontologi_vote WHERE id = %s", (idx,))
        res = cur.fetchall()
        if not res:
            # Bug fix: the original raised IndexError for an unknown id.
            cur.close()
            return redirect('view_submit_traits')
        cur.execute("SELECT COUNT(*) FROM ontologi WHERE words = %s",
                    (res[0][1],))
        words_counts = cur.fetchone()[0]
        if words_counts == 0:
            # NOTE(review): subtraits is inserted as res[0][2], the same column
            # as traits — looks suspicious but preserved as-is; confirm schema.
            cur.execute(
                "INSERT INTO ontologi (words, traits, subtraits) "
                "VALUES (%s, %s, %s)",
                (res[0][1], res[0][2], res[0][2]))
            db.commit()
            cur.execute("DELETE FROM ontologi_vote WHERE texts = %s",
                        (res[0][1],))
            db.commit()
            cur.close()
            flash('Sudah berhasil ditambahkan')
            return redirect('list_new_traits')
        cur.close()
        return redirect('view_submit_traits')
    finally:
        # Bug fix: the original only closed the connection on the insert path.
        db.close()
Ejemplo n.º 3
0
 def get_lanes(self):
     """Fetch all lane trajectories belonging to the 'reference' annotation.

     Returns:
         (lanes, cluster_ids): `lanes` is a list of [(x, y)] point lists,
         one per object; `cluster_ids` holds each lane's cluster label
         (taken from the lane's first point).
     """
     db = helper.connect()
     cursor = db.cursor()
     # Outer query: ids of every object under the 'reference' annotation.
     string = "SELECT OBJECT.ID FROM OBJECT,ANNOTATION WHERE ANNOTATION.ID=OBJECT.PID AND ANNOTATION.FILENAME='reference'"
     cursor.execute(string)
     res = cursor.fetchone()
     lanes = []
     cluster_ids = []
     while res is not None:
         list_x,list_y = [],[]
         cursor_1 = db.cursor()
         # Inner query: this object's camera-space points in frame order,
         # joined with the per-point cluster label.
         string = "SELECT OBJECT.ID,PT_CAMERA_COOR.T, PT_CAMERA_COOR.X,PT_CAMERA_COOR.Y,PT_CAMERA_COOR_ADD_OPTS.CLUSTER FROM PT_CAMERA_COOR,OBJECT,ANNOTATION,PT_CAMERA_COOR_ADD_OPTS WHERE OBJECT.ID = PT_CAMERA_COOR.PID AND ANNOTATION.ID=OBJECT.PID AND ANNOTATION.FILENAME='reference' AND OBJECT.ID=? AND PT_CAMERA_COOR_ADD_OPTS.ID = PT_CAMERA_COOR.ID ORDER BY CAST(PT_CAMERA_COOR.T AS UNSIGNED) ASC"
         cursor_1.execute(string,[str(res[0])])
         res_1 = cursor_1.fetchone()
         cluster_id = None
         while res_1 is not None:
             list_x.append(float(res_1[2]))
             list_y.append(float(res_1[3]))
             # Keep the first point's cluster label for the whole lane.
             if cluster_id == None:
                 cluster_id = res_1[4]
             res_1 = cursor_1.fetchone()

         # Light smoothing (window 3) before deriving headings.
         smooth_list_x = s.smooth(np.asarray(list_x), window_len=3,window='flat')
         smooth_list_y = s.smooth(np.asarray(list_y), window_len=3,window='flat')
         # NOTE(review): theta_list is computed but never used or returned.
         theta_list = self.add_heading(smooth_list_x, smooth_list_y)
         cluster_ids.append(cluster_id)
         data_points_path = [(a,b) for a,b in zip(smooth_list_x,smooth_list_y)]
         lanes.append(data_points_path)
         res = cursor.fetchone()

     db.close()
     return lanes,cluster_ids
Ejemplo n.º 4
0
def add_trait():
    """Register a new candidate trait word pair from the submitted form.

    Normalizes the word (lowercase, letters only, underscore-joined) and
    inserts it into ontologi_vote with zeroed vote counters, unless it is
    already present.
    """
    words = request.form['words']
    traits = request.form['traits']
    if words != '' and traits != '':
        # Normalize: lowercase, strip non-letters, join words with underscores.
        words_clean = re.sub(r'[^a-z]', ' ', words.lower())
        words_clean_fix = '_'.join(words_clean.split())
        db = connect()
        try:
            cur = db.cursor()
            # Parameterized to prevent SQL injection (assumes 'format'
            # paramstyle, e.g. pymysql/MySQLdb; use '?' for sqlite3).
            cur.execute(
                "SELECT COUNT(*) FROM ontologi_vote WHERE texts = %s",
                (words_clean_fix,))
            words_counts = cur.fetchone()[0]
            if words_counts == 0:
                cur.execute(
                    "INSERT INTO ontologi_vote (texts, traits, upvote, downvote) "
                    "VALUES (%s, %s, '0', '0')",
                    (words_clean_fix, traits))
                db.commit()
            else:
                print('Kata sudah ada')
            cur.close()
        finally:
            # Bug fix: the original leaked the connection when the word
            # already existed.
            db.close()

    return redirect('list_new_traits')
Ejemplo n.º 5
0
def api_recent(num):
    """Return the `num` most recent attempt rows as an HTML string."""
    db, curs = database.connect()
    rows = database.get_recent(db, curs, num)
    db.close()
    return ''.join(str(row) + '<br>' for row in rows)
Ejemplo n.º 6
0
def view_delete_traits():
    """Render the trait-deletion page with every pending vote row."""
    db = connect()
    cur = db.cursor()
    cur.execute("SELECT * FROM ontologi_vote")
    rows = list(cur.fetchall())
    cur.close()
    db.close()
    return render_template('view_delete_traits.html', traits=rows)
Ejemplo n.º 7
0
def get_list_all_traits():
    """Render the full ontology listing page."""
    db = connect()
    cur = db.cursor()
    cur.execute("SELECT * FROM ontologi")
    rows = list(cur.fetchall())
    cur.close()
    db.close()
    return render_template('list_all_traits.html', traits=rows)
Ejemplo n.º 8
0
def downvote():
    """Increment the downvote counter for the trait named in the form."""
    texts = request.form['texts']
    db = connect()
    try:
        cur = db.cursor()
        # Parameterized to block SQL injection from the user-supplied `texts`.
        # Assumes 'format' paramstyle (pymysql/MySQLdb); use '?' for sqlite3.
        cur.execute(
            "UPDATE ontologi_vote SET downvote = downvote + 1 "
            "WHERE texts = %s;", (texts,))
        db.commit()
        cur.close()
    finally:
        # Ensure the connection is released even if the update fails.
        db.close()
    return get_list_new_traits()
Ejemplo n.º 9
0
def result():
    """Analyze user text against the trait ontology and render the result page.

    Tokenizes the cleaned input, greedily matches the longest ontology phrase
    at each position, and tallies how often each of the Big Five traits
    appears.
    """
    sent = request.form['input_user']
    sent = sent.lower()
    sent = re.sub(r'[^a-z]', ' ', sent)
    tokens = sent.split()

    list_traits = [
        'Aggreeableness', 'Conscientiousness', 'Extraversion', 'Neuroticism',
        'Openness'
    ]
    list_freq = [0, 0, 0, 0, 0]
    i = 0

    db = connect()
    res_table = []

    while i < len(tokens):
        temp = []
        temp_traits = []
        temp_subtraits = []
        cur = db.cursor()
        # Parameterized LIKE query prevents SQL injection from user text.
        # Assumes 'format' paramstyle (pymysql/MySQLdb); use '?' for sqlite3.
        cur.execute("SELECT * FROM ontologi WHERE words like %s",
                    ('%' + tokens[i] + '%',))
        for row in cur.fetchall():
            temp.append(row[1])
            temp_traits.append(row[2])
            temp_subtraits.append(row[3])
        cur.close()
        # Among candidates whose phrase literally occurs in the text, pick the
        # longest (most underscore-joined words) one.
        maks = 0
        trait = ''
        subtrait = ''
        words = ''
        for k in range(len(temp)):
            if re.sub('_', ' ', temp[k].lower()) in sent:
                phrase_len = len(temp[k].split('_'))
                if phrase_len > maks:
                    words = temp[k].split('_')
                    trait = temp_traits[k]
                    subtrait = temp_subtraits[k]
                    maks = phrase_len
        if maks > 0:
            res_table.append([' '.join(words), trait, subtrait])
            list_freq[list_traits.index(trait)] += 1
            i += maks  # skip past the matched multi-word phrase
        else:
            i += 1

    db.close()

    return render_template('result.html',
                           trait=list_traits,
                           freq=list_freq,
                           table_traits=res_table,
                           len=len(list_freq))
Ejemplo n.º 10
0
def sample(sampling_tuple, num_samples):
    """Pick a random non-excluded scenario and sample frame numbers from it.

    Args:
        sampling_tuple: (scenario upper bound, excluded scenario id).
        num_samples: how many frame indices to draw.

    Returns:
        (frames, scenario_id): `num_samples` random frame numbers within the
        chosen scenario's frame range, and the chosen scenario id.
    """
    candidate_scenes = list(range(1, sampling_tuple[0]))
    candidate_scenes.remove(sampling_tuple[1])  # drop the held-out scenario
    scene_2_sample_from = np.random.choice(candidate_scenes)

    db = helper.connect()
    cursor = db.cursor()
    string = "SELECT MIN(CAST(OBJECT.STARTFRAME AS UNSIGNED)),MAX(CAST(OBJECT.ENDFRAME AS UNSIGNED)) FROM OBJECT,ANNOTATION WHERE OBJECT.PID = ANNOTATION.ID AND ANNOTATION.ID =?"
    cursor.execute(string, [str(scene_2_sample_from)])
    bounds = cursor.fetchone()
    db.close()

    min_frame = int(bounds[0])
    max_frame = int(bounds[1])
    frames = np.random.randint(low=min_frame, high=max_frame, size=num_samples)
    return frames, scene_2_sample_from
Ejemplo n.º 11
0
def delete_trait():
    """Delete a pending trait vote by id after an admin password check."""
    idx = request.form['traits']
    password = request.form['pass']
    # NOTE(review): plaintext hard-coded password — consider hashing and moving
    # the secret out of source control.
    if password != "scbd2019jaya":
        # Bug fix: the original fell through without returning a response.
        return redirect('view_delete_traits')

    db = connect()
    try:
        cur = db.cursor()
        # Parameterized to block SQL injection via the form-supplied id.
        # Assumes 'format' paramstyle (pymysql/MySQLdb); use '?' for sqlite3.
        cur.execute("DELETE FROM ontologi_vote WHERE id = %s", (idx,))
        db.commit()
        cur.close()
    finally:
        # Ensure the connection is released even if the delete fails.
        db.close()
    return redirect('list_new_traits')
Ejemplo n.º 12
0
def main():
	"""Interactive CQL console: optionally import training data, then REPL.

	Reads lines from stdin and forwards them to the database until 'exit'.
	"""
	conn, cur = db.connect()
	'''
	UNCOMMENT to import training data
	'''
	# NOTE(review): despite the note above, these two calls are currently
	# active and will run on every start.
	db.import_training_data(conn, cur)
	# NOTE(review): "udpate" typo is the db module's actual API name.
	db.udpate_predict_training(conn, cur)

	# Python 2 code (raw_input / print statement): loop until the user exits.
	while (True):
		line = raw_input('> ')
		if line == 'exit':
			db.close(conn, cur)
			sys.exit()
		print db.execute_cql(conn, cur, line)
Ejemplo n.º 13
0
def recent(num):
    """Render a folium world map marking the `num` most recent attempts."""
    db, curs = database.connect()
    rows = database.get_recent(db, curs, num)
    db.close()

    fmap = folium.Map(location=[24.635246, 2.616971], zoom_start=2, tiles='CartoDB positron')

    # Drop rows whose source address cannot be geolocated.
    for entry in rows:
        position = geo.get_coordinates(entry[2])
        if position is not None:
            folium.Marker(location=position).add_to(fmap)

    return "<div style=\"height:50%; width:60%;\">" + str(fmap.get_root().render()) + "</div>"
Ejemplo n.º 14
0
def get_tj_ids(frames_2_sample, scene_2_sample_from):
    """Collect per-frame object ids and point ids for the sampled frames.

    Returns:
        (particle_id_list, tj_id_list): parallel lists — for each sampled
        frame, the point ids and the object ids present in that frame.
    """
    db = helper.connect()
    cursor = db.cursor()
    # Query is frame-invariant apart from its parameters, so build it once.
    query = "SELECT PT_CAMERA_COOR.ID,OBJECT.ID FROM PT_CAMERA_COOR,OBJECT,ANNOTATION WHERE OBJECT.ID = PT_CAMERA_COOR.PID AND PT_CAMERA_COOR.T=? AND ANNOTATION.ID=OBJECT.PID AND ANNOTATION.ID=? ORDER BY CAST(PT_CAMERA_COOR.T AS UNSIGNED) ASC"
    tj_id_list = []
    particle_id_list = []
    for frame in frames_2_sample:
        cursor.execute(query, [str(frame), str(scene_2_sample_from)])
        ids_in_frame = []
        particles_in_frame = []
        row = cursor.fetchone()
        while row is not None:
            ids_in_frame.append(row[1])
            particles_in_frame.append(row[0])
            row = cursor.fetchone()
        tj_id_list.append(ids_in_frame)
        particle_id_list.append(particles_in_frame)

    db.close()
    return particle_id_list, tj_id_list
Ejemplo n.º 15
0
def apt_totalips():
    """Return the number of distinct attacker IPs as a plain string."""
    db, curs = database.connect()
    total_ips = database.get_total_ips(db, curs)
    db.close()
    return str(total_ips)
Ejemplo n.º 16
0
def form_input():
    """Build per-scenario path and trajectory feature dicts from the database.

    Iterates every object id, loads its points in frame order, and derives
    per-point velocity, acceleration, heading and turn-rate features.

    Returns:
        (data_path, data_trajectory, particle_ids):
            data_path: scenario-id -> list of [(x, y, theta)] per object.
            data_trajectory: scenario-id -> list of
                [(x, y, theta, x_v, y_v, class, x_a, y_a, omega)] per object.
            particle_ids: flat list of point ids in traversal order.
    """
    db = helper.connect()
    cursor = db.cursor()
    # One round trip for the object count and scenario count.
    string = "SELECT (SELECT COUNT(*) FROM OBJECT), (SELECT COUNT(*) FROM ANNOTATION);"
    cursor.execute(string)
    res = cursor.fetchone()
    count, count_scenarios = res[0], res[1]
    # Global position bounding box (only used by the commented-out
    # normalization block at the bottom).
    max_x, min_x, max_y, min_y = float('-inf'), float('inf'), float(
        '-inf'), float('inf')
    data_path, data_trajectory = dict(), dict()
    particle_ids = []
    ref_indexes = []
    for i in range(1, count + 1):
        cursor = db.cursor()
        # All points of object i in frame order; scenario '10' excluded.
        string = "SELECT PT_CAMERA_COOR.ID,PT_CAMERA_COOR.PID,PT_CAMERA_COOR.X,PT_CAMERA_COOR.Y,PT_CAMERA_COOR.T,OBJECT.NAME,ANNOTATION.ID,OBJECT.STARTFRAME,OBJECT.ENDFRAME FROM PT_CAMERA_COOR,OBJECT,ANNOTATION WHERE OBJECT.ID = PT_CAMERA_COOR.PID AND OBJECT.ID=? AND ANNOTATION.ID=OBJECT.PID AND ANNOTATION.ID NOT IN ('10') ORDER BY CAST(PT_CAMERA_COOR.T AS UNSIGNED) ASC"
        cursor.execute(string, [i])
        res_1 = cursor.fetchone()
        if res_1 is None:
            continue
        # Scenario (annotation) id this object belongs to.
        h = int(res_1[6])

        # Objects of scenario 3 are remembered as reference indexes.
        # NOTE(review): ref_indexes is collected but never returned or used.
        if h == 3:
            ref_indexes.append(i)

        last_x, last_y = None, None
        list_x, list_y, list_type, list_x_v, list_y_v = [], [], [], [], []
        traj_length = 0
        while res_1 is not None:
            t_id = res_1[0]
            ''' since the trajectories are ordered now, and will be merged again in order again, there is no need to keep track of the indices explicitly for particle ids'''
            particle_ids.append(t_id)
            traj_length = traj_length + 1
            # Grow the global bounding box.
            if max_x < float(res_1[2]):
                max_x = float(res_1[2])
            if min_x > float(res_1[2]):
                min_x = float(res_1[2])
            if max_y < float(res_1[3]):
                max_y = float(res_1[3])
            if min_y > float(res_1[3]):
                min_y = float(res_1[3])
            # Finite-difference velocity between consecutive frames.
            # NOTE(review): `fps` is a module-level global not visible here.
            if last_x is not None and last_y is not None:
                velocity_x, velocity_y = (float(res_1[2]) - last_x) * fps, (
                    float(res_1[3]) - last_y) * fps
                list_x_v.append(velocity_x)
                list_y_v.append(velocity_y)
            last_x, last_y = float(res_1[2]), float(res_1[3])
            list_x.append(float(res_1[2]))
            list_y.append(float(res_1[3]))
            # Class from the name prefix: vehicle -> '1', pedestrian -> '0'.
            type_str = str(res_1[5][:3])
            if 'veh' == type_str:
                list_type.append('1')
            elif 'ped' == type_str:
                list_type.append('0')
            else:
                print(False)

            res_1 = cursor.fetchone()
        # Duplicate the first velocity so lengths match the point lists.
        # NOTE(review): raises IndexError for a one-point trajectory
        # (list_x_v is empty) — confirm such objects cannot occur.
        list_x_v.insert(0, list_x_v[0])
        list_y_v.insert(0, list_y_v[0])
        # Acceleration = forward difference of velocity (last value repeated).
        list_x_a = list(
            map(lambda v1, v2: (v2 - v1), list_x_v,
                list_x_v[1:] + [list_x_v[-1]]))
        list_y_a = list(
            map(lambda v1, v2: (v2 - v1), list_y_v,
                list_y_v[1:] + [list_y_v[-1]]))
        '''smooth_list_x = s.smooth(np.asarray(list_x), window_len=10,window='flat')
        smooth_list_y = s.smooth(np.asarray(list_y), window_len=10,window='flat')
        smooth_list_x = s.smooth(smooth_list_x)
        smooth_list_y = s.smooth(smooth_list_y)'''
        ''''z = [[e1,e2] for e1,e2 in zip(list_x,list_y)]
        a = np.asarray(z)
        smooth_list_x, smooth_list_y = s.gaussian_smoothing(a)'''
        # Smoothing currently disabled: raw coordinates pass straight through.
        smooth_list_x, smooth_list_y = list_x, list_y
        #print('size-x',len(list_x),len(smooth_list_x))
        #print('size-y',len(list_y),len(smooth_list_y))
        theta_list = add_heading(smooth_list_x, smooth_list_y)
        theta_list = np.deg2rad(theta_list)
        # Sanity check: dump the headings if any are NaN/inf.
        if not np.all(np.isfinite(np.asarray(theta_list))) or np.any(
                np.isnan(np.asarray(theta_list))):
            print(theta_list)
        ''' theta goes in as radians. omega goes in as sin(theta2) - sin(theta1)'''
        omega_list = list(
            map(lambda v1, v2: (v2 - v1), list(np.sin(theta_list)),
                list(np.sin(theta_list))[1:] + [list(np.sin(theta_list))[-1]]))
        '''print_stats(omega_list,i)'''
        data_points_path = [
            (a, b, c)
            for a, b, c in zip(smooth_list_x, smooth_list_y, theta_list)
        ]
        data_points_trajectory = [
            (a, b, c, d, e, f, g1, h1, i1) for a, b, c, d, e, f, g1, h1, i1 in
            zip(smooth_list_x, smooth_list_y, theta_list, list_x_v, list_y_v,
                list_type, list_x_a, list_y_a, omega_list)
        ]
        # Group the object's features under its scenario id.
        if h in data_path:
            data_path[h].append(data_points_path)
            data_trajectory[h].append(data_points_trajectory)
        else:
            data_path[h] = [data_points_path]
            data_trajectory[h] = [data_points_trajectory]

    db.close()
    data_path_norm = data_path.copy()
    ''' Normalization'''
    '''
    for k in data_path_norm:
        for i in range(len(data_path_norm[k])):
            for j in range(len(data_path_norm[k][i])):
                data_path_norm[k][i][j] = ((data_path_norm[k][i][j][0] - min_x ) / (max_x - min_x) , (data_path_norm[k][i][j][1] - min_y ) / (max_y - min_y) , (data_path_norm[k][i][j][2]) / 360)
    '''

    return data_path, data_trajectory, particle_ids
Ejemplo n.º 17
0
def starred(ip):
    """Render a page whose background is the marker color stored for `ip`."""
    db, curs = database.connect()
    color = database.get_color(db, curs, ip)
    db.close()
    page = '<html><body style="background:{};"></body></html>'.format(color)
    return page
Ejemplo n.º 18
0
def api_totalattempts():
    """Return the total number of recorded attempts as a plain string."""
    db, curs = database.connect()
    total = database.get_total_attempts(db, curs)
    db.close()
    return str(total)
Ejemplo n.º 19
0
def get_a_data_batch():
    """Assemble a training batch of (environment, ego, target) tensors.

    Loads every annotated point outside scenario '8', then repeatedly samples
    pairs of consecutive frames with identical object sets. For each object
    in such a pair it records: its own feature vector (ego), its neighbours
    grouped by cluster into a (5, 6, 7) grid (environment), and its velocity
    delta to the next frame (target).

    Returns:
        (env_batch, ego_batch, target_batch, (max_density_index, density_max))
        with the batch axes truncated to the number of filled entries.
    """
    density_max = 0
    db = helper.connect()
    cursor = db.cursor()
    string = """SELECT PT_CAMERA_COOR.ID,PT_CAMERA_COOR.PID,PT_CAMERA_COOR.X,PT_CAMERA_COOR.Y,PT_CAMERA_COOR.T,PT_CAMERA_COOR_ADD_OPTS.THETA,PT_CAMERA_COOR_ADD_OPTS.X_V,PT_CAMERA_COOR_ADD_OPTS.Y_V,PT_CAMERA_COOR_ADD_OPTS.CLASS,ANNOTATION.ID,OBJECT.ID,PT_CAMERA_COOR_ADD_OPTS.CLUSTER 
                    FROM PT_CAMERA_COOR,PT_CAMERA_COOR_ADD_OPTS,OBJECT,ANNOTATION WHERE OBJECT.ID = PT_CAMERA_COOR.PID AND ANNOTATION.ID=OBJECT.PID AND ANNOTATION.ID NOT IN ('8') AND PT_CAMERA_COOR_ADD_OPTS.ID = PT_CAMERA_COOR.ID 
                        ORDER BY CAST(PT_CAMERA_COOR.T AS UNSIGNED) ASC"""
    cursor.execute(string)
    results = cursor.fetchall()

    # Convert the rows to a float32 matrix so boolean masking works below.
    x = map(list, list(results))
    x = list(x)
    n = np.asarray(x, dtype=np.float32)
    db.close()
    # NOTE(review): BATCH_SIZE is a module-level global; the fill loop writes
    # up to 20000 entries, so BATCH_SIZE must be >= 20000 — confirm.
    env_batch = np.zeros(shape=(BATCH_SIZE, 5, 6, 7), dtype=np.float32)
    env_batch_slice = np.zeros(shape=(5, 6, 7), dtype=np.float32)
    target_batch = np.zeros(shape=(BATCH_SIZE, 2, 1), dtype=np.float32)
    ego_batch = np.zeros(shape=(BATCH_SIZE, 7, 1), dtype=np.float32)
    '''here :env_batch = np.zeros(shape=(BATCH_SIZE,1,7), dtype = np.float32)
    here :env_batch_slice = np.zeros(shape=(1,7), dtype = np.float32)
    here :target_batch = np.zeros(shape=(BATCH_SIZE,2,1) , dtype = np.float32)
    here :ego_batch = np.zeros(shape=(BATCH_SIZE,7,1) , dtype = np.float32)'''
    ''' we need consecutive frames of the same annotation id having the same object ids
    once we have these 2 frames, we create a image grid and a target grid
    
    1. sample one scenario randomly between min and max but not in evaluation scenario
    2. get t_id range in scenario from 1. sample a t_id
    3. run 2 queries with (scenario id in 1 and t_id in 2) and (scenario id in 1 and t_id+1 in 2) 
    4. if list of object_ids in the result of both queries in 2 are same, use the result as consecutive frames. else from 2.
    '''
    fill_index = 0
    max_density_index = 0
    while fill_index < 20000:
        found_it = False
        # Rejection-sample frame pairs until one with a stable object set
        # is found (can spin indefinitely if none exists).
        while (not found_it):
            #grid_data = np.zeros(shape=(GRID_SIZE,GRID_SIZE,4))
            #grid_target = np.zeros(shape=(GRID_SIZE,GRID_SIZE,3))
            ''' Step 1'''
            min_s, max_s = np.amin(n[:, 9]), np.amax(n[:, 9])
            scenes = list(range(int(min_s), int(max_s) + 1))
            scenes.remove(8)  # scenario 8 is held out for evaluation
            scenario_id = np.random.choice(scenes)
            ''' Step 2'''
            c1 = n[:, 9] == scenario_id
            r = n[c1]
            # NOTE(review): shadows the builtins min/max for the rest of
            # this loop body.
            min, max = np.amin(r[:, 4]), np.amax(r[:, 4])
            t_id = np.random.randint(min, max + 1)
            ''' Step 3'''
            c2 = n[:, 4] == t_id
            c3 = n[:, 4] == t_id + 1
            c4 = np.logical_and(c1, c2)
            c5 = np.logical_and(c1, c3)
            r1 = n[c4]  # rows of frame t_id
            r2 = n[c5]  # rows of frame t_id + 1
            ''' Step 4'''
            object_ids_r1 = r1[:, 10]
            object_ids_r2 = r2[:, 10]
            '''if len(object_ids_r1) != density_2_look_4:
                found_it = False
                continue'''
            if np.array_equal(object_ids_r1, object_ids_r2):
                # Every object in the pair takes a turn as the ego agent.
                for o_ids in object_ids_r1:
                    env_batch_slice = np.zeros(shape=(5, 6, 6),
                                               dtype=np.float32)
                    '''here :env_batch_slice = np.zeros(shape=(1,6), dtype = np.float32)'''
                    #print('density is',len(object_ids_r1),' ego object id is',o_ids)
                    density_value = len(object_ids_r1)
                    # Track the densest sample seen so far.
                    if density_value > density_max:
                        density_max = density_value
                        max_density_index = fill_index
                    density_array = np.zeros(shape=(5, 6, 1), dtype=np.float32)
                    '''here :density_array = np.zeros(shape=(1,1), dtype = np.float32)'''
                    density_array.fill(density_value)
                    # Next free slot per cluster row of the env grid.
                    obj_ind = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0}
                    for i in range(r1.shape[0]):
                        if r1[i, 10] == o_ids:
                            #print('ego obj id is',o_ids)
                            x, y = float(r1[i, 2]), float(r1[i, 3])
                            fea_vector = np.reshape(np.asarray([
                                x, y,
                                float(r1[i, 6]),
                                float(r1[i, 7]),
                                int(r1[i, 8]),
                                int(r1[i, 11]), density_value
                            ]),
                                                    newshape=(7, 1))
                            ego_batch[fill_index] = np.copy(fea_vector)
                            # Target = ego's velocity delta into frame t_id+1.
                            for i2 in range(r2.shape[0]):
                                if r2[i2, 10] == o_ids:
                                    #print('target entered for obj id',o_ids)
                                    y_a = float(r2[i2, 7]) - float(r1[i, 7])
                                    x_a = float(r2[i2, 6]) - float(r1[i, 6])
                                    # NOTE(review): indexes r2 with i (not i2);
                                    # x_n/y_n are unused, so harmless today.
                                    x_n, y_n = float(r2[i, 2]), float(r2[i, 3])
                                    fea_vector = np.reshape(np.asarray(
                                        [x_a, y_a]),
                                                            newshape=(2, 1))
                                    target_batch[fill_index] = np.copy(
                                        fea_vector)
                        else:
                            #print('added env entry for object id',r1[i,10])
                            x, y = float(r1[i, 2]), float(r1[i, 3])
                            cluster_id = int(r1[i, 11])
                            # Neighbour feature vector; class offset by 10 to
                            # distinguish env agents from the ego encoding.
                            fea_vector = np.asarray([
                                x, y,
                                float(r1[i, 6]),
                                float(r1[i, 7]),
                                int(r1[i, 8]) + 10,
                                int(r1[i, 11])
                            ])
                            if cluster_id < 5:
                                env_batch_slice[cluster_id,
                                                obj_ind[cluster_id]] = np.copy(
                                                    fea_vector)
                                '''here : env_batch_slice[0] = np.copy(fea_vector)'''
                                obj_ind[cluster_id] = obj_ind[cluster_id] + 1
                    #env_batch_slice = np.sort(env_batch_slice,axis=1)
                    # Append the constant density channel as the 7th feature.
                    env_batch[fill_index] = np.concatenate(
                        (env_batch_slice, density_array), axis=2)
                    '''here : env_batch[fill_index] = np.concatenate((env_batch_slice,density_array),axis=1)'''
                    fill_index = fill_index + 1
                    print(fill_index)
                    print('------')
                found_it = True
    #       else:
    # print(False)
    #print('max density is ',density_max)
    return env_batch[0:fill_index, ], ego_batch[0:fill_index, ], target_batch[
        0:fill_index, ], (max_density_index, density_max)
Ejemplo n.º 20
0
# Draw maps using Folium and write to an html file.
# Author: N. Beckstead
#
# TODO: Debug when a marker field is None.
# TODO: Draw heatmap using log_mapper.attempts table.
# TODO: Optimize by looking up geo data first. Then call make_marker()
#

import folium
from folium import plugins
import db_helper as database
import geo_helper as geo
import server_vars

# Radius (px) used when drawing circle markers on the maps.
MARKER_RADIUS = 4
# Module-wide DB connection opened at import time and shared by the helpers.
# NOTE(review): never closed in this module's visible code — confirm lifecycle.
db, curs = database.connect()


#
# Main map function. Draw all maps.
#
def draw():

    markers_map = folium.Map(location=[24.635246, 2.616971],
                             zoom_start=3,
                             tiles='CartoDB dark_matter')
    heatmap = folium.Map(location=[24.635246, 2.616971],
                         zoom_start=3,
                         tiles='CartoDB positron')

    markers_map = make_markersmap(markers_map)
Ejemplo n.º 21
0
def get_a_evaluation_data_batch(for_dbn):
    """Assemble the evaluation batch from held-out scenario '8'.

    Walks every pair of consecutive frames in scenario 8 and, for each object,
    encodes its ego feature vector, its neighbours grouped by cluster into a
    (5, 6, 7) environment grid, and its velocity delta to the next frame.

    Args:
        for_dbn: when True, uses a 7-feature ego layout that includes THETA
            and sorts each environment slice along axis 1.

    Returns:
        (env_batch, ego_batch, target_batch, (0, 0)) truncated to the number
        of filled entries.
    """
    db = helper.connect()
    cursor = db.cursor()
    string = """SELECT PT_CAMERA_COOR.ID,PT_CAMERA_COOR.PID,PT_CAMERA_COOR.X,PT_CAMERA_COOR.Y,PT_CAMERA_COOR.T,PT_CAMERA_COOR_ADD_OPTS.THETA,PT_CAMERA_COOR_ADD_OPTS.X_V,PT_CAMERA_COOR_ADD_OPTS.Y_V,PT_CAMERA_COOR_ADD_OPTS.CLASS,ANNOTATION.ID,OBJECT.ID,PT_CAMERA_COOR_ADD_OPTS.CLUSTER 
                    FROM PT_CAMERA_COOR,PT_CAMERA_COOR_ADD_OPTS,OBJECT,ANNOTATION WHERE OBJECT.ID = PT_CAMERA_COOR.PID AND ANNOTATION.ID=OBJECT.PID AND ANNOTATION.ID IN ('8') AND PT_CAMERA_COOR_ADD_OPTS.ID = PT_CAMERA_COOR.ID 
                        ORDER BY CAST(PT_CAMERA_COOR.T AS UNSIGNED) ASC"""
    cursor.execute(string)
    results = cursor.fetchall()

    # Convert the rows to a float32 matrix so boolean masking works below.
    x = map(list, list(results))
    x = list(x)
    n = np.asarray(x, dtype=np.float32)
    db.close()
    # NOTE(review): BATCH_SIZE is a module-level global; must exceed the
    # number of (frame pair, object) entries produced — confirm.
    env_batch = np.zeros(shape=(BATCH_SIZE, 5, 6, 7), dtype=np.float32)
    target_batch = np.zeros(shape=(BATCH_SIZE, 2, 1), dtype=np.float32)
    ego_batch = np.zeros(shape=(BATCH_SIZE, 7, 1), dtype=np.float32)
    # Frame range covered by the scenario (rows are frame-ordered).
    t_start, t_end = int(n[0, 4]), int(n[n.shape[0] - 1, 4] + 1)
    fill_index = 0
    for t_id in range(t_start, t_end - 2):
        c2 = n[:, 4] == t_id
        c3 = n[:, 4] == t_id + 1
        r1 = n[c2]  # rows of frame t_id
        r2 = n[c3]  # rows of frame t_id + 1
        object_ids_r1 = r1[:, 10]
        object_ids_r2 = r2[:, 10]
        '''print('looking for density :',density_2_look_4)
        if len(object_ids_r1) != density_2_look_4:
                found_it = False
                continue'''
        # NOTE(review): unlike the training batch, object sets of the two
        # frames are NOT checked for equality here — confirm intentional.
        for o_ids in object_ids_r1:
            density_value = len(object_ids_r1)
            # NOTE(review): obj_ind is a single counter shared across all
            # cluster rows here (the training batch uses one per cluster).
            obj_ind = 0
            env_batch_slice = np.zeros(shape=(5, 6, 6), dtype=np.float32)
            '''here : env_batch_slice = np.zeros(shape=(1,6), dtype = np.float32)'''
            density_array = np.zeros(shape=(5, 6, 1), dtype=np.float32)
            '''here : density_array = np.zeros(shape=(1,1), dtype = np.float32)'''
            density_array.fill(len(object_ids_r1))
            for i in range(r1.shape[0]):
                if r1[i, 10] == o_ids:
                    x, y = float(r1[i, 2]), float(r1[i, 3])
                    if for_dbn is False:
                        # Standard ego layout: position, velocity, class,
                        # cluster, density.
                        fea_vector = np.reshape(np.asarray([
                            x, y,
                            float(r1[i, 6]),
                            float(r1[i, 7]),
                            int(r1[i, 8]),
                            int(r1[i, 11]), density_value
                        ]),
                                                newshape=(7, 1))
                    else:
                        # DBN layout: THETA replaces the density feature.
                        fea_vector = np.reshape(np.asarray([
                            x, y,
                            float(r1[i, 5]),
                            float(r1[i, 6]),
                            float(r1[i, 7]),
                            int(r1[i, 8]),
                            int(r1[i, 11])
                        ]),
                                                newshape=(7, 1))
                    ego_batch[fill_index] = np.copy(fea_vector)
                    # Target = ego's velocity delta into frame t_id+1.
                    for i2 in range(r2.shape[0]):
                        if r2[i2, 10] == o_ids:
                            print('target entered for obj id', o_ids)
                            y_a = float(r2[i2, 7]) - float(r1[i, 7])
                            x_a = float(r2[i2, 6]) - float(r1[i, 6])
                            fea_vector = np.reshape(np.asarray([x_a, y_a]),
                                                    newshape=(2, 1))
                            target_batch[fill_index] = np.copy(fea_vector)
                else:
                    #print('added env entry for object id',r1[i,10])
                    x, y = float(r1[i, 2]), float(r1[i, 3])
                    cluster_id = int(r1[i, 11])
                    # Neighbour feature vector; class offset by 10 to
                    # distinguish env agents from the ego encoding.
                    fea_vector = np.asarray([
                        x, y,
                        float(r1[i, 6]),
                        float(r1[i, 7]),
                        int(r1[i, 8]) + 10,
                        int(r1[i, 11])
                    ])
                    if cluster_id < 5:
                        env_batch_slice[cluster_id,
                                        obj_ind] = np.copy(fea_vector)
                        ''' here : env_batch_slice[0] = np.copy(fea_vector)'''
                    obj_ind = obj_ind + 1
            if for_dbn:
                env_batch_slice = np.sort(env_batch_slice, axis=1)
            # Append the constant density channel as the 7th feature.
            env_batch[fill_index] = np.concatenate(
                (env_batch_slice, density_array), axis=2)
            '''here : env_batch[fill_index] = np.concatenate((env_batch_slice,density_array),axis=1)'''
            fill_index = fill_index + 1
            print(fill_index)
            print('------')
    print('size of eval set :', fill_index)
    return env_batch[0:fill_index, ], ego_batch[0:fill_index, ], target_batch[
        0:fill_index, ], (0, 0)