Example #1
def display_depth(dev, data, timestamp):
    global image_depth
    global filenum
    filenum = filenum+1
    #print data.shape
    '''
    print type(data)
    print 100.0/(-0.00307 * data[240,320] + 3.33)
    print 0.1236 * math.tan(data[240,320] / 2842.5 + 1.1863)
    print data[240,320]
    print '-------'
    '''
    depth = 100/(-0.00307*data + 3.33)
    #savedepth(depth)
    t = time.time()
    fname = '%s/D%s.nparray'%(directory,t)
    np.save(fname, data)
    print "Writing %s"%fname
    data = frame_convert.pretty_depth(data)
    
    mp.gray()
    mp.figure(1)
    if image_depth:
        image_depth.set_data(data)
    else:
        image_depth = mp.imshow(data, interpolation='nearest', animated=True)
    mp.draw()
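Both this snippet and Example #9 convert the raw 11-bit Kinect disparity to an approximate distance with the linear formula 100 / (-0.00307 * raw + 3.33); the commented-out debug block also shows a tangent-based variant. A minimal standalone sketch of those two conversions (the helper names are illustrative, not part of the original code):

import math
import numpy as np

def disparity_to_cm_linear(raw):
    # Linear approximation used above: distance in centimetres
    return 100.0 / (-0.00307 * raw + 3.33)

def disparity_to_m_tangent(raw):
    # Tangent-based approximation from the commented-out debug print: distance in metres
    return 0.1236 * math.tan(raw / 2842.5 + 1.1863)

raw_frame = np.full((480, 640), 700, dtype=np.uint16)  # placeholder frame
print(disparity_to_cm_linear(raw_frame[240, 320]))      # centre pixel, in cm
print(disparity_to_m_tangent(raw_frame[240, 320]))      # centre pixel, in m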
Example #2
def display_depth(dev, data, timestamp):
    #numpy.set_printoptions(threshold='nan')
    #  numpy.savetxt("outputnumpy.txt", data, fmt='%f', delimiter=',', newline='] ')
    #numpy.ndarray.tofile("outputnumpy.txt", sep=' ', format='%f')
    #data.numpy.tolist()
    #a=numpy.fromstring(data)
    #  M=[[int(num) for num in line.strip().split()] for line in data.split('\n')]
    array = frame_convert.pretty_depth(data)
    print array
Example #3
def display_depth(dev, data, timestamp):
  #numpy.set_printoptions(threshold='nan')
  #  numpy.savetxt("outputnumpy.txt", data, fmt='%f', delimiter=',', newline='] ')
  #numpy.ndarray.tofile("outputnumpy.txt", sep=' ', format='%f')
  #data.numpy.tolist()
  #a=numpy.fromstring(data)
  #  M=[[int(num) for num in line.strip().split()] for line in data.split('\n')]
  array = frame_convert.pretty_depth(data)
  print array
Example #4
def log_dep(dev, data, timestamp):
    global rgb_data, dep_data, frame_number, rgb_time

    data = frame_convert.pretty_depth(data)[:,:,None]
    dep_data.append( data )
    rgb_data.append( rgb_temp )
    frame_number += 1
    print "Writing frame", frame_number, 
    print "\tColor data is %ims stale" % ((time.time() - rgb_time)*1000)
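This logging callback assumes that companion globals (rgb_temp holding the latest colour frame and rgb_time its capture time) are filled in elsewhere by an RGB callback that is not shown; a minimal sketch of what such a callback could look like (names chosen to match the globals used above):

import time

def log_rgb(dev, data, timestamp):
    global rgb_temp, rgb_time
    rgb_temp = data.copy()   # keep the most recent colour frame for pairing with depth
    rgb_time = time.time()   # timestamp used for the staleness message above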
Example #5
def display_depth(dev, data, timestamp):
    global image_depth
    data = frame_convert.pretty_depth(data)
    mp.gray()
    mp.figure(1)
    if image_depth:
        image_depth.set_data(data)
    else:
        image_depth = mp.imshow(data, interpolation="nearest", animated=True)
    mp.draw()
Example #6
def display_depth(dev, data, timestamp):
    global image_depth
    data = frame_convert.pretty_depth(data)
    mp.gray()
    mp.figure(1)
    if image_depth:
        image_depth.set_data(data)
    else:
        image_depth = mp.imshow(data, interpolation='nearest', animated=True)
    mp.draw()
Example #7
def get_depth(raw=False):
    frame = fn.sync_get_depth()[0]
    if raw:
        frame = make_pretty_raw(frame)
    else:
        frame = np.invert(frame_convert.pretty_depth(frame))
    # frame /= 8
    # frame = frame.astype(np.uint8)
    return frame
Example #8
def display_depth(dev, data, timestamp):
    global image_depth
    data = frame_convert.pretty_depth(data)
    mp.gray()
    mp.figure(1)
    if image_depth:
        image_depth.set_data(data)
    else:
        image_depth = mp.imshow(data, interpolation='nearest', animated=True)
    mp.draw()
    im = Image.fromarray(data)
    im.save('./static/image/img2.jpg')
Example #9
def display_depth(data):
    global image_depth
    global filenum
    filenum = filenum+1
    #print data.shape
    '''
    print type(data)
    print 100.0/(-0.00307 * data[240,320] + 3.33)
    print 0.1236 * math.tan(data[240,320] / 2842.5 + 1.1863)
    print data[240,320]
    print '-------'
    '''
    depth = 100/(-0.00307*data + 3.33)
    savedepth(depth)
    data = frame_convert.pretty_depth(data)
    
    mp.gray()
    mp.figure(1)
    if image_depth:
        image_depth.set_data(data)
    else:
        image_depth = mp.imshow(data, interpolation='nearest', animated=True)
    mp.draw()
Example #10
def get_depth():
	d,_ = freenect.sync_get_depth()
	
	try:
		# Raises IndexError when no pixel is closer than 600 (nothing in range)
		np.where(d <= 600)[0][0]
		points = np.where(d <= 600)
		go = True
	except IndexError:
		go = False
	
	if go:
		device.emit(uinput.EV_ABS, uinput.ABS_X, points[0][0], syn=False)
		device.emit(uinput.EV_ABS, uinput.ABS_Y, points[0][1])
#		time.sleep(0.01)
		print "x: %d"%(points[0][0])
		print "y: %d"%(points[0][1])
	
	else:
		device.emit(uinput.EV_ABS, uinput.ABS_X, 10, syn=False)
		device.emit(uinput.EV_ABS, uinput.ABS_Y, 20)
		print 'no 2000'
		print d[240, 320]
	return frame_convert.pretty_depth(d)[120:360, 160:480]
Example #11
def get_depth():
    data = freenect.sync_get_depth()[0]
    print type(data)
    return {'pretty':frame_convert.pretty_depth(data),'raw':data}
Example #12
def get_depth():
    img=frame_convert.pretty_depth(sync_get_depth()[0])
    return img
Example #13
def get_depth():
    return frame_convert.pretty_depth(sync_get_depth()[0])
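Most of these examples lean on frame_convert.pretty_depth to turn the 11-bit raw depth map into an 8-bit image suitable for display. As a reference point, a typical implementation clips and rescales roughly as sketched below (check the frame_convert module bundled with your libfreenect Python wrapper for the exact code):

import numpy as np

def pretty_depth(depth):
    # Clip the raw depth to 10 bits, then shift down to 8 bits for display
    np.clip(depth, 0, 2 ** 10 - 1, depth)
    depth >>= 2
    return depth.astype(np.uint8)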
Example #14
def get_depth():
    """ This function obtains the depth image from the kinect, if any is
        connected.
    """
    img = frame_convert.pretty_depth(sync_get_depth()[0])
    return img
Example #15
def get_depth():
    return frame_convert.pretty_depth(opennpy.sync_get_depth()[0])
Example #16
def get_depth():
    #array = array.astype(np.uint8)
    return frame_convert.pretty_depth(freenect.sync_get_depth()[0])
Example #17
def get_depth():
    temp = freenect.sync_get_depth()[0]
    return frame_convert.pretty_depth(temp)
Example #18
					pyr_area = moments['m00']
					xpyr = int(moments['m10']/moments['m00'])         # cx = M10/M00
					ypyr = int(moments['m01']/moments['m00'])         # cy = M01/M00

	
	if second_area < 1000:
		x3 = x2
		y3 = y2
	
	table.PutNumber(u"x3", (x3-320))
	table.PutNumber(u"y3", (y3-240))
	table.PutNumber(u"x2", (x2-320))
	table.PutNumber(u"y2", (y2-240))
	table.PutNumber(u"xpyr", (xpyr-320))
	table.PutNumber(u"ypyr", (ypyr-240))
	print xpyr, ypyr
	return


table.PutNumber(u"PYRAMIDSHOT", 0)

while 1:
	ir = frame_convert.pretty_depth(freenect.sync_get_video(0, freenect.VIDEO_IR_8BIT)[0])
	morph = denoise(ir)
	centroid(morph)
	if int(time.time() - start) > pic_num:
		cv2.imwrite('/media/DISK_IMG/Robotpics/inputtest'+str(pic_num)+'.jpg',ir)
		cv2.imwrite('/media/DISK_IMG/Robotpics/filttest'+str(pic_num)+'.jpg',morph)
		pic_num+=1
	print (time.time() - start)
Example #19
position = (0, 0) 	# Circle position
precision = 10 		# Amount of pixels to skip each step
threshold = 20 		# Threshold for depth pixels
oldMinVal = 255		# Minimum depth value seen in the previous frame

while True:

	# Reset these params at every frame capture
	meanX = 0
	MeanY = 0
	count = 0
	minVal = 255

	# Get depth and try to normalize it
	depth = frame_convert.pretty_depth(freenect.sync_get_depth()[0])

	# Loop through pixels to find the closest ones and calculate their mean position
	for y in xrange(0, len(depth), precision):
		for x in xrange(0, len(depth[0]), precision):
			if depth[y][x] < minVal:
				minVal = depth[y][x]

			if depth[y][x] < oldMinVal + threshold:
				meanX = meanX + x
				MeanY = MeanY + y
				count = count + 1

	# Set previous frame threshold value to new min value
	oldMinVal = minVal
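The nested pixel loop above can also be written with numpy array operations. A sketch of the same subsample, threshold, and average logic as a vectorised alternative (not part of the original script):

import numpy as np

def closest_region_mean(depth, old_min_val, threshold=20, precision=10):
    # Subsample the frame exactly as the loop does (every `precision` pixels)
    sub = depth[::precision, ::precision]
    min_val = int(sub.min())
    ys, xs = np.nonzero(sub < old_min_val + threshold)
    if len(xs) == 0:
        return min_val, None
    # Scale the subsampled indices back to full-frame pixel coordinates
    return min_val, (float(xs.mean()) * precision, float(ys.mean()) * precision)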
Example #20
def get_depth():
    """ This function obtains the depth image from the kinect, if any is
        connected.
    """
    img = frame_convert.pretty_depth(sync_get_depth()[0])
    return img
Example #21
def get_depth():
    return frame_convert.pretty_depth(freenect.sync_get_depth()[0])
Example #22
def display_depth(dev, data, timestamp):
    global keep_running
    cv2.imshow('Depth', frame_convert.pretty_depth(data))
    if cv2.waitKey(10) == 27:
        keep_running = False
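Callbacks like display_depth in these examples are normally registered with the asynchronous runloop of the libfreenect Python wrapper; a minimal usage sketch, assuming that wrapper's freenect.runloop and freenect.Kill API:

import freenect

def body(*args):
    # Stop the runloop once the display callback has cleared the flag
    if not keep_running:
        raise freenect.Kill

freenect.runloop(depth=display_depth, body=body)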