def run_server(add_render_gen_args, render_gen):
    """Parse CLI args, build a camera + inference pipeline, and stream video.

    Args:
        add_render_gen_args: callback that registers model-specific options on
            the argument parser before parsing.
        render_gen: factory returning a generator. Its first `next()` yields
            the inference input size passed to make_camera; afterwards each
            `send((tensor, layout, command))` yields an SVG overlay string
            (or a falsy value meaning "no overlay").

    Blocks forever in signal.pause() until a signal terminates the process.
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--source',
        help='/dev/videoN:FMT:WxH:N/D or .mp4 file or image file',
        default='/dev/video1:YUY2:800x600:24/1')
    parser.add_argument('--bitrate', type=int, default=1000000,
                        help='Video streaming bitrate (bit/s)')
    parser.add_argument('--loop', default=False, action='store_true',
                        help='Loop input video file')
    add_render_gen_args(parser)
    args = parser.parse_args()

    gen = render_gen(args)
    # The generator's first yielded value is the model input size.
    camera = make_camera(args.source, next(gen), args.loop)
    # Raise instead of assert: asserts are stripped under `python -O`, which
    # would turn a bad --source into a confusing crash further down.
    if camera is None:
        raise ValueError('Could not create camera from source: %s' % args.source)

    with StreamingServer(camera, args.bitrate) as server:
        def render_overlay(tensor, layout, command):
            # Feed the frame to the model generator; push whatever overlay it
            # produces to connected clients (empty SVG clears the overlay).
            overlay = gen.send((tensor, layout, command))
            server.send_overlay(overlay if overlay else EMPTY_SVG)

        camera.render_overlay = render_overlay
        signal.pause()
def run_server(q):
    """Parse CLI args, open the camera, and stream it until a signal arrives.

    Args:
        q: queue-like object handed through to StreamingServer (its exact
            role is defined by StreamingServer, not visible here).

    Blocks forever in signal.pause().
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--source',
        help='/dev/videoN:FMT:WxH:N/D or .mp4 file or image file',
        default='/dev/video0:YUY2:640x480:30/1')
    parser.add_argument('--bitrate', type=int, default=1000000,
                        help='Video streaming bitrate (bit/s)')
    parser.add_argument('--loop', default=False, action='store_true',
                        help='Loop input video file')
    args = parser.parse_args()

    camera = make_camera(args.source, args.loop)
    # NOTE(review): `stupid_overlay` is not defined in this function; it must
    # exist at module level or this line raises NameError — confirm.
    camera.stupid_overlay = stupid_overlay
    with StreamingServer(camera, q, args.bitrate) as server:
        signal.pause()
def run_server(q):
    """Stream the camera with debug overlay callbacks attached.

    Args:
        q: queue-like object handed through to StreamingServer.

    Both overlay callbacks are stubs that only trace the incoming tensor
    shape; no overlay is generated. Blocks forever in signal.pause().
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--source',
        help='/dev/videoN:FMT:WxH:N/D or .mp4 file or image file',
        default='/dev/video0:YUY2:640x480:30/1')
    parser.add_argument('--bitrate', type=int, default=1000000,
                        help='Video streaming bitrate (bit/s)')
    parser.add_argument('--loop', default=False, action='store_true',
                        help='Loop input video file')
    args = parser.parse_args()

    # [300, 300] is presumably the model input size expected by make_camera —
    # TODO confirm against make_camera's signature.
    camera = make_camera(args.source, [300, 300], args.loop)

    def render_overlay(tensor, layout):
        # Debug stub: trace the frame tensor's shape only.
        print(tensor.shape)

    def stupid_overlay(tensor, layout):
        # Debug stub: trace the frame tensor's shape only.
        print(tensor.shape)

    camera.stupid_overlay = stupid_overlay
    camera.render_overlay = render_overlay
    with StreamingServer(camera, q, args.bitrate) as server:
        signal.pause()
def run_server(model):
    """Parse CLI args (including model-specific ones), stream with callbacks.

    Args:
        model: object providing add_render_gen_args(parser) and
            render_gen(args); render_gen's first yield is the inference input
            size passed to make_camera.

    The overlay callbacks are currently debug stubs that only print the
    tensor shape. Blocks forever in signal.pause().
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--source',
        help='/dev/videoN:FMT:WxH:N/D or .mp4 file or image file',
        default='/dev/video0:YUY2:640x480:30/1')
    parser.add_argument('--bitrate', type=int, default=1000000,
                        help='Video streaming bitrate (bit/s)')
    parser.add_argument('--loop', default=False, action='store_true',
                        help='Loop input video file')
    model.add_render_gen_args(parser)
    args = parser.parse_args()

    gen = model.render_gen(args)
    camera = make_camera(args.source, next(gen), args.loop)
    # Raise instead of assert: asserts are stripped under `python -O`.
    if camera is None:
        raise ValueError('Could not create camera from source: %s' % args.source)

    with StreamingServer(camera, args.bitrate) as server:
        def render_overlay(tensor, layout, command):
            # Debug stub; TODO(owner): re-enable overlay generation via
            # gen.send((tensor, layout, command)) and server.send_overlay().
            print(tensor.shape, "render overlay")

        def stupid_overlay(tensor, layout, command):
            print(tensor.shape, "stupid_overlay")

        camera.render_overlay = render_overlay
        camera.stupid_overlay = stupid_overlay
        signal.pause()
def run_server(q):
    """Serve a raw camera stream over the network until a signal arrives.

    Args:
        q: queue-like object handed through to StreamingServer.

    Blocks forever in signal.pause().
    """
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument(
        '--source',
        default='/dev/video0:YUY2:640x480:30/1',
        help='/dev/videoN:FMT:WxH:N/D or .mp4 file or image file')
    arg_parser.add_argument(
        '--bitrate',
        type=int,
        default=1000000,
        help='Video streaming bitrate (bit/s)')
    opts = arg_parser.parse_args()

    cam = make_camera(opts.source)
    with StreamingServer(cam, q, opts.bitrate) as server:
        signal.pause()
"move_right": pygame.K_d or pygame.K_RIGHT, "move_up": pygame.K_w or pygame.K_UP, "move_down": pygame.K_s or pygame.K_DOWN, "select": pygame.K_q } screen_size = (1024, 576) screen = pygame.display.set_mode(screen_size) ################# ## C A M E R A ## ################# camera: Camera = make_camera(y=(32 * 12), x_min=0, x_max=512, y_min=0, y_max=192) ################# ## P L A Y E R ## ################# import playermaker from playermaker import * PlayerObject = Player(0, 0, 32, 32, "resources/sprites/player.png") ############# ## G R I D ## #############
# ---- Render parameters -----------------------------------------------------
FOVY = 45.0
# NOTE(review): FOVX was referenced in make_camera() below but never defined,
# which raises NameError at import time. Defined here equal to FOVY — TODO
# confirm the intended horizontal field of view (image aspect is 140:100).
FOVX = 45.0
PIXWIDTH = 140
PIXHEIGHT = 100

# ---- Scene: two spheres and one point light --------------------------------
prims = [sc.make_sphere(vr.make_vector(0.0, 0.0, -4.0), 1.0),
         sc.make_sphere(vr.make_vector(-3.0, -2.0, -11.0), 2.0)]
lights = [lt.make_light(2.5, 4.0, 10.0, 7.0)]

# ---- Camera at the origin, looking down -Z with +Y up ----------------------
eye = vr.make_vector(0.0, 0.0, 0.0)
up = vr.make_vector(0.0, 1.0, 0.0)
center = vr.make_vector(0.0, 0.0, -1.0)
geo = ca.make_view_geometry(eye, up, center)
camera = ca.make_camera(eye, geo, PIXWIDTH, PIXHEIGHT, FOVX, FOVY)
film = fm.create_film(PIXWIDTH, PIXHEIGHT)


def pytracer():
    """Trace one primary ray per pixel, develop the film, and display it."""
    for j in range(PIXHEIGHT):
        for i in range(PIXWIDTH):
            ray = camera(i, j)
            color = int(rt.get_color(prims, lights, ray))
            fm.develop(film, i, j, color)
    fm.display(film)


if __name__ == "__main__":
    pytracer()