def prepare(self, input_stream, events, window_size, shift=0, late_time=10):
        if not isinstance(input_stream, db.Channels):
            raise Exception("windowize_by_events processor: data stream must be Channels.")

        if not isinstance(events, db.Event):
            raise Exception("windowize_by_events processor: events must be a db.Event object.")

        if input_stream.SI.channels <= 0:
            raise Exception("windowize_by_events processor: invalid channel count.")

        if window_size <= 0:
            raise Exception("windowize_by_events processor: invalid window size.")

        self._events = db.make_empty(events.SI)
        self._window_size = window_size
        self._shift = shift

        max_buffer_size = (math.ceil(late_time * input_stream.SI.samplingRate)) * 2

        self._signal = np_rb.RingBuffer(capacity=max_buffer_size, dtype=(float, (input_stream.SI.channels,)))

        self._times = np_rb.RingBuffer(max_buffer_size, dtype=np.int64)

        self._si = si.Window(input_stream.SI.channels, self._window_size, input_stream.SI.samplingRate)

        return self._si
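
As a standalone illustration of the buffer setup in prepare() above, the following minimal sketch builds the same pair of numpy_ringbuffer buffers; the channel count, sampling rate, and late_time values are placeholders, not taken from the source.

import math

import numpy as np
from numpy_ringbuffer import RingBuffer

channels, sampling_rate, late_time = 8, 250.0, 10   # placeholder stream parameters
max_buffer_size = math.ceil(late_time * sampling_rate) * 2

# One multi-channel sample per slot, plus a parallel buffer of int64 timestamps.
signal = RingBuffer(capacity=max_buffer_size, dtype=(float, (channels,)))
times = RingBuffer(capacity=max_buffer_size, dtype=np.int64)

signal.append(np.zeros(channels))
times.append(0)
print(len(signal), signal.shape)  # -> 1 (1, 8)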
Example #2
    def __init__(self, buffer_len, init_data):
        """
        Stores the frequency map.

        :param buffer_len: number of points to keep
        :param init_data: initialization data
        """
        self.buffer_len = buffer_len
        self.freq_data_ = ringbuffer.RingBuffer(self.buffer_len)
        self.time_data_ = ringbuffer.RingBuffer(self.buffer_len)
        if 'depol_data' in init_data:
            for data in init_data['depol_data']:
                self.freq_data_.append(data['freq'])
                self.time_data_.append(data['time'])
        self.lock = Lock()
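
The loop above implies a particular shape for init_data; the sketch below spells out that assumed structure and a lock-protected append helper. The values and the add_point name are illustrative, not part of the excerpt.

# Assumed shape of init_data for the frequency storage above (illustrative values only).
init_data = {
    'depol_data': [
        {'freq': 818.0, 'time': 1565000000.0},
        {'freq': 818.1, 'time': 1565000001.0},
    ],
}

# Hypothetical helper in the same class: new points would be appended under
# self.lock so readers never see freq_data_ and time_data_ out of step.
def add_point(self, freq, time):
    with self.lock:
        self.freq_data_.append(freq)
        self.time_data_.append(time)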
Example #3
    def __init__(self, buffer_len, init_data, depolarizer, freq_storage_):
        """
        Stores the asymmetry points.

        :param buffer_len: number of points to keep
        :param init_data: initialization data
        :param depolarizer: depolarizer instance
        :param freq_storage_: freq_storage_ instance
        """
        self.buffer_len = buffer_len
        self.lock = Lock()
        self.data_ = ringbuffer.RingBuffer(self.buffer_len, dtype=chunk)
        self.time_data_ = ringbuffer.RingBuffer(self.buffer_len)  # Same as self.data_['time'], but as an np.array.
        self.depolarizer = depolarizer
        self.freq_storage_ = freq_storage_
        if 'asym_data' in init_data:
            for data in init_data['asym_data']:
                self.data_.append(data)
                self.time_data_.append(data['time'])
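
The `chunk` dtype used for data_ is not defined in the excerpt; the sketch below assumes a plausible structured dtype with at least a 'time' field (the only field the excerpt actually reads) and mirrors the init loop's append pattern.

import numpy as np

# Hypothetical layout for one asymmetry point; only the 'time' field is implied by the excerpt.
chunk = np.dtype([('time', np.float64), ('x', np.float64), ('y', np.float64)])

point = np.zeros((), dtype=chunk)
point['time'] = 1565000000.0
# Mirrors the init loop above: the full record and its timestamp are kept in step.
# storage.data_.append(point)
# storage.time_data_.append(point['time'])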
Example #4
    def __init__(self, X, Y, buffer_len):
        """
        Stores histograms (the beam spot) from the detector.

        :param X: number of histogram bins along X
        :param Y: number of histogram bins along Y
        :param buffer_len: number of histograms to keep
        """

        self.buffer_len = buffer_len
        self.X = X
        self.Y = Y
        self.x_arr = np.arange(self.X)
        self.y_arr = np.arange(self.Y)
        self.hists_ = ringbuffer.RingBuffer(buffer_len, dtype=(np.int32, (self.X, self.Y)))
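
A self-contained sketch of the same histogram-buffer pattern; it uses numpy_ringbuffer rather than the excerpt's ringbuffer module (an assumption), and the X, Y, and buffer_len values are placeholders.

import numpy as np
from numpy_ringbuffer import RingBuffer

X, Y, buffer_len = 64, 64, 100
hists = RingBuffer(buffer_len, dtype=(np.int32, (X, Y)))

hists.append(np.zeros((X, Y), dtype=np.int32))  # one empty detector frame
print(np.asarray(hists).shape)                  # -> (1, 64, 64)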
Example #5
def main():
    parser = argparse.ArgumentParser(description="Test AdmiralNet")
    parser.add_argument("--model_file", type=str, required=True)
    args = parser.parse_args()
    
    model_dir, model_file = os.path.split(args.model_file)
    config_path = os.path.join(model_dir,'config.pkl')
    with open(config_path, 'rb') as config_file:
        config = pickle.load(config_file)
    model_prefix, _ = os.path.splitext(model_file)

    gpu = int(config['gpu'])
    use_float32 = bool(config['use_float32'])
    label_scale = float(config['label_scale'])
    context_length = int(config['context_length'])
    sequence_length = int(config['sequence_length'])
    hidden_dim = int(config['hidden_dim'])
    optical_flow = bool(config.get('optical_flow',''))
    rnn_cell_type = 'lstm'
    network = models.AdmiralNet(cell=rnn_cell_type, context_length=context_length,
                                sequence_length=sequence_length, hidden_dim=hidden_dim,
                                use_float32=use_float32, gpu=gpu, optical_flow=optical_flow)
    state_dict = torch.load(args.model_file)
    network.load_state_dict(state_dict)
    network=network.float()
    network=network.cuda(0)
    print(network)
    vjoy_max = 32000
    
    throttle = torch.Tensor(1,10)
    brake = torch.Tensor(1,10)
    if use_float32:
        network.float()
    else:
        network.double()
    if gpu >= 0:
        network = network.cuda(gpu)
    network.eval()
    vj = py_vjoy.vJoy()
    vj.capture(1) #1 is the device ID
    vj.reset()
    js = py_vjoy.Joystick()
    js.setAxisXRot(int(round(vjoy_max/2))) 
    js.setAxisYRot(int(round(vjoy_max/2))) 
    vj.update(js)
    time.sleep(2)
    inputs = []
    wheel_pred = cv2.imread('predicted_fixed.png',cv2.IMREAD_UNCHANGED)
    wheelrows_pred = 66
    wheelcols_pred = 66
    wheel_pred = cv2.resize(wheel_pred, (wheelcols_pred,wheelrows_pred), interpolation = cv2.INTER_CUBIC)
    buffer = numpy_ringbuffer.RingBuffer(capacity=context_length, dtype=(np.float32, (2,66,200) ) )

    dt = 12
    context_length=10
    debug=True
    app="F1 2017"
    dl = pyf1_datalogger.ScreenVideoCapture()
    dl.open(app,0,200,1700,300)
    interp = cv2.INTER_AREA
    if debug:
        cv2.namedWindow(app, cv2.WINDOW_AUTOSIZE)
    pscreen = fill_buffer(buffer,dl,dt=dt,context_length=context_length,interp=interp)
    buffer_torch = torch.rand(1,10,2,66,200).float()
    buffer_torch=buffer_torch.cuda(0)
    while(True):
        cv2.waitKey(dt)
        screen = grab_screen(dl)
        screen_grey = cv2.cvtColor(screen,cv2.COLOR_BGR2GRAY)
        screen_grey = cv2.resize(screen_grey,(200,66), interpolation=interp)
        flow = cv2.calcOpticalFlowFarneback(pscreen,screen_grey, None, 0.5, 3, 20, 8, 5, 1.2, 0)
        im= flow.transpose(2, 0, 1).astype(np.float32)
        buffer.append(im)
        pscreen = screen_grey
        buffer_torch[0] = torch.from_numpy(np.array(buffer))
        #print("Input Size: " + str(buffer_torch.size()))
        outputs = network(buffer_torch, throttle=None, brake=None )
        angle = outputs[0][0].item()
        print("Output: " + str(angle))
        scaled_pred_angle = 180.0*angle+7
        M_pred = cv2.getRotationMatrix2D((wheelrows_pred/2,wheelcols_pred/2),scaled_pred_angle,1)
        wheel_pred_rotated = cv2.warpAffine(wheel_pred,M_pred,(wheelrows_pred,wheelcols_pred))
        background = screen
        out_size = background.shape
        print(out_size)
        print(wheel_pred_rotated.shape)
        overlayed_pred = imutils.annotation_utils.overlay_image(background,wheel_pred_rotated,int((out_size[1]-wheelcols_pred)/2),int((out_size[0]-wheelcols_pred)/2))
        if debug:
            cv2.imshow(app,overlayed_pred)
        vjoy_angle = -angle*vjoy_max + vjoy_max/2.0
        js.setAxisXRot(int(round(vjoy_angle))) 
        js.setAxisYRot(int(round(vjoy_angle))) 
        vj.update(js)
        
    print(buffer.shape)
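
The inference loop above keeps a rolling context of optical-flow frames in a fixed-capacity ring buffer and converts the whole window to a tensor every step; a stripped-down sketch of that pattern, with random frames standing in for the screen capture and no network call, is shown below.

import numpy as np
import numpy_ringbuffer
import torch

context_length = 10
buffer = numpy_ringbuffer.RingBuffer(capacity=context_length,
                                     dtype=(np.float32, (2, 66, 200)))

# Pre-fill the context; once the buffer is full, each append overwrites the
# oldest frame, so the window always holds the latest `context_length` flows.
for _ in range(context_length):
    buffer.append(np.random.rand(2, 66, 200).astype(np.float32))

batch = torch.from_numpy(np.array(buffer)).unsqueeze(0)
print(batch.shape)  # -> torch.Size([1, 10, 2, 66, 200])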