Code Example #1
    def testGetFile(self):
        logger = data_logger.Logger(self.file_prefix)
        expected = os.path.join('%s_1.data' % self.file_prefix)
        self.assertEqual(expected, logger.file_path)
        logger.GetFile(256)
        expected = os.path.join('%s_2.data' % self.file_prefix)
        self.assertEqual(expected, logger.file_path)
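
The test above implies that this Logger writes numbered files named '<prefix>_N.data' and rolls to the next number once a size threshold is reached. A minimal rotation sketch consistent with that behaviour; the 256-byte threshold and the file_number / bytes_written attributes are assumptions, only file_path and GetFile appear in the test:

import os


class Logger:
    """Illustrative size-rotated log writer; not the project's actual implementation."""

    MAX_FILE_BYTES = 256  # Assumed threshold; the test only shows GetFile(256) forcing a roll.

    def __init__(self, file_prefix):
        self.file_prefix = file_prefix
        self.file_number = 1
        self.bytes_written = 0
        self.current_file = open(self.file_path, 'ab')

    @property
    def file_path(self):
        return os.path.join('%s_%d.data' % (self.file_prefix, self.file_number))

    def GetFile(self, incoming_bytes):
        """Returns the open file, rolling to the next numbered file on overflow."""
        if self.bytes_written + incoming_bytes >= self.MAX_FILE_BYTES:
            self.current_file.close()
            self.file_number += 1
            self.bytes_written = 0
            self.current_file = open(self.file_path, 'ab')
        self.bytes_written += incoming_bytes
        return self.current_file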
Code Example #2
def main(max_time, altitude_dif, delay_sample):
    formatted_time = datetime.datetime.utcnow().isoformat()
    print('[{}][main] Starting main'.format(formatted_time))

    bmp280_0 = bmp280.BMP280(delay_sample - 0.01)
    dht11_0 = dht11.DHT11(pin=4)
    mpu9250_0 = mpu9250.MPU9250()
    servo_0 = servo.Servo(0)

    deployer_0 = parachute_deployer.Deployer(bmp280_0, servo_0, altitude_dif,
                                             delay_sample)
    logger_0 = data_logger.Logger(bmp280_0, dht11_0, mpu9250_0)
    camera_0 = camera.Camera()

    deployer_thread = threading.Thread(target=deployer_0.main)
    logging_thread = threading.Thread(target=logger_0.log)

    deployer_thread.start()
    logging_thread.start()

    camera_0.start_recording()

    finisher_input_thread = threading.Thread(target=finisher_input,
                                             args=(deployer_0, logger_0,
                                                   camera_0),
                                             daemon=True)
    finisher_time_thread = threading.Thread(target=finisher_time,
                                            args=(deployer_0, logger_0,
                                                  camera_0, max_time),
                                            daemon=True)
    finisher_input_thread.start()
    finisher_time_thread.start()
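
The finisher_input and finisher_time thread targets started above are not shown in this excerpt. A hypothetical sketch of what such finishers could look like, assuming each component exposes a stop-style hook; the stop() and stop_recording() method names below are assumptions, not confirmed project API:

import datetime
import time


def finisher_input(deployer, logger, camera):
    # Hypothetical: block until the operator presses Enter, then shut everything down.
    input('Press Enter to stop the flight computer...\n')
    deployer.stop()          # assumed method
    logger.stop()            # assumed method
    camera.stop_recording()  # assumed method


def finisher_time(deployer, logger, camera, max_time):
    # Hypothetical: shut everything down after max_time seconds.
    time.sleep(max_time)
    formatted_time = datetime.datetime.utcnow().isoformat()
    print('[{}][finisher_time] Max time reached, stopping'.format(formatted_time))
    deployer.stop()          # assumed method
    logger.stop()            # assumed method
    camera.stop_recording()  # assumed method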
Code Example #3
    def testWriteAndReadBack(self):
        logger = data_logger.Logger(self.file_prefix)
        logger.WriteProto(self.point)
        logger.WriteProto(self.point)
        logger.current_file.flush()
        file_points = []
        for file_point in logger.ReadProtos():
            file_points.append(file_point)
        expected = [self.point, self.point]
        self.assertEqual(expected, file_points)
Code Example #4
    def testMidHeader(self):
        logger = data_logger.Logger(self.file_prefix)
        logger.WriteProto(self.point)
        logger.current_file.flush()
        with open(logger.file_path, 'ab') as temp_file:
            temp_file.write((1).to_bytes(data_logger.PROTO_LEN_BYTES,
                                         data_logger.BYTE_ORDER))

        file_points = []
        for file_point in logger.ReadProtos():
            file_points.append(file_point)
        expected = [self.point]
        self.assertEqual(expected, file_points)
Code Example #5
    def testShortWrite(self):
        logger = data_logger.Logger(self.file_prefix)
        logger.WriteProto(self.point)
        logger.WriteProto(self.point)
        logger.current_file.flush()
        with open(logger.file_path, 'rb') as temp_file:
            contents = temp_file.read()
        with open(logger.file_path, 'wb') as temp_file:
            temp_file.write(contents[:-1])

        file_points = []
        for file_point in logger.ReadProtos():
            file_points.append(file_point)
        expected = [self.point]
        self.assertEqual(expected, file_points)
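
Taken together, Code Examples #3 to #5 imply a length-prefixed record format: each serialized proto is preceded by its length, encoded in data_logger.PROTO_LEN_BYTES bytes with data_logger.BYTE_ORDER, and ReadProtos silently stops at a dangling header or a truncated final record. A minimal reader sketch consistent with that behaviour; the 2-byte prefix and big-endian byte order are assumptions:

PROTO_LEN_BYTES = 2   # Assumed width of the length prefix.
BYTE_ORDER = 'big'    # Assumed byte order.


def read_records(file_path):
    """Yields the serialized payload of each complete record in a length-prefixed log.

    Sketch of the format the tests imply; the real ReadProtos additionally
    parses each payload into a protobuf message.
    """
    with open(file_path, 'rb') as log_file:
        while True:
            header = log_file.read(PROTO_LEN_BYTES)
            if len(header) < PROTO_LEN_BYTES:
                return  # End of file: not enough bytes left for a length header.
            proto_len = int.from_bytes(header, BYTE_ORDER)
            payload = log_file.read(proto_len)
            if len(payload) < proto_len:
                return  # Dangling header or truncated record (testMidHeader, testShortWrite).
            yield payload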
Code Example #6
def ReplayLog(filepath, include_sleep=False):
    """Replays data, extermely useful to LED testing.

  Args:
    filepath: A string of the path of lap data.
    include_sleep: If True replays adds sleeps to simulate how data was
                   processed in real time.

  Returns:
    A exit_speed.ExitSpeed instance that has replayed the given data.
  """
    logging.info('Replaying %s', filepath)
    logger = data_logger.Logger(filepath)
    points = list(logger.ReadProtos())
    logging.info('Number of points %d', len(points))
    if include_sleep:
        replay_start = time.time()
        time_shift = int(replay_start * 1e9 - points[0].time.ToNanoseconds())
        session_start = None
    else:
        FLAGS.set_default('commit_cycle', 10000)
    es = exit_speed.ExitSpeed(live_data=not include_sleep)
    for point in points:
        if include_sleep:
            point.time.FromNanoseconds(point.time.ToNanoseconds() + time_shift)
            if not session_start:
                session_start = point.time.ToMilliseconds() / 1000

        es.point = point
        es.ProcessSession()
        if include_sleep:
            run_delta = time.time() - replay_start
            point_delta = point.time.ToMilliseconds() / 1000 - session_start
            if run_delta < point_delta:
                time.sleep(point_delta - run_delta)

    if not include_sleep:
        time.sleep(1)
        qsize = len(es.pusher.point_queue)
        while qsize > 0:
            qsize = len(es.pusher.point_queue)
            logging.log_every_n_seconds(logging.INFO, 'Queue size %s', 2,
                                        qsize)
        es.pusher.stop_process_signal.value = True
        print(time.time())
        es.pusher.process.join(10)
        print(time.time())
    return es
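
A usage note: with include_sleep=True the replay is paced against wall-clock time, so it takes roughly as long as the original session; without it, the commit cycle is raised and the pusher queue is drained as fast as possible. The log path below is illustrative only:

# Illustrative call; the path is hypothetical.
es = ReplayLog('logs/session_1.data', include_sleep=True)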
Code Example #7
File: FlyRTcore.py    Project: flybrains/FlyRT
def run(cd, process_min=None, process_max=None, n_processes=None):

    # Read in parameters set using the UI
    input_vidpath = str(cd['path'])
    recording = bool(cd['record'])
    logging = bool(cd['log'])
    scaling = float(cd['scaling'])
    multi = cd['multi']
    n_inds = int(cd['n_inds'])
    heading = bool(cd['heading'])
    wings = bool(cd['wings'])
    ifd_on = bool(cd['IFD'])
    arena_mms = float(cd['arena_mms'])
    thresh_val = cd['thresh_val']
    crop = cd['crop']
    r = cd['r']
    mask = cd['mask']
    comm = str(cd['comm'])
    baud = int(cd['baud'])
    ifd_min = float(cd['IFD_thresh'])
    pulse_len = float(cd['pulse_len'])
    pulse_lockout = float(cd['pulse_lockout'])
    ifd_time_thresh = float(cd['IFD_time_thresh'])
    rt_ifd = bool(cd['RT_IFD'])
    rt_pp = bool(cd['RT_PP'])
    rt_pp_delay = int(cd['RT_PP_Delay'])
    rt_pp_period = int(cd['RT_PP_Period'])
    rt_LED_red = bool(cd['LED_color_Red'])
    rt_LED_green = bool(cd['LED_color_Green'])
    rt_LED_intensity = int(cd['LED_intensity'])
    FLIR = bool(cd['FLIR'])
    rt_LED_color = cd['rt_LED_color']
    colors = [(0, 255, 0), (0, 255, 255), (255, 0, 255), (255, 255, 255),
              (255, 255, 0), (255, 0, 0), (0, 0, 0)]

    # Initialize control flow parameters
    stop_bit, roll_call, all_present, history = False, 0, False, None

    # Initialize frame counters
    process_frame_count, fps_calc = 0, 0

    # Set scaling params
    mm2pixel = float(arena_mms / crop.shape[0])

    # Set up serial port if needed
    if (rt_ifd) or (rt_pp == True):
        ser = ard.init_serial(comm, baud)
        time.sleep(2)

    # Determine final output frame size
    vis, vis_shape = get_vis_shape(crop, scaling)

    # Generate locations to store output data
    utils.file_ops()

    # Start csv logger
    if logging == True:
        logger = dl.Logger()
        logger.create_outfile()
        logger.write_header(process_frame_count,
                            n_inds,
                            ifd=True,
                            headings=True)

    # Use calculated shape to initialize writer
    if (recording == True):
        dt = datetime.now()
        out = cv2.VideoWriter(
            "generated_data/movies/" + str(dt.month) + "_" + str(dt.day) +
            "_" + str(dt.year) + "_" + str(dt.hour) + str(dt.minute) + '.avi',
            cv2.VideoWriter_fourcc(*'MJPG'), 30, (vis.shape[1], vis.shape[0]),
            True)

    # Diff calculation initialization
    last, df = 0, []

    # Individual location(s) measured in the last and current step [x,y]
    meas_last = [[[0, 0], [0, 0]], [[0, 0], [0, 0]]]
    meas_now = [[[0, 0], [0, 0]], [[0, 0], [0, 0]]]

    # Initialize metrics backups
    old_ifds, old_angles, last_heads = np.asarray([0, 0, 0]), [0, 0], [0, 0]

    # Initialize RT Experiment Backups
    last_pulse_time, accum = 0, [time.time(), None]

    # FLIR Camera Stuff
    def capIm():
        # Retrieves a buffer from the FLIR camera in place of cv2 capture
        try:
            img = cam.retrieveBuffer()
        except PyCapture2.Fc2error as fc2Err:
            print("Error retrieving buffer :", fc2Err)
            return False, []
        data = np.asarray(img.getData(), dtype=np.uint8)
        data = data.reshape((img.getRows(), img.getCols()))
        return True, data

    # Check if Real Time experiment being done with FLIR camera, init connection if so
    if FLIR == True:
        bus = PyCapture2.BusManager()
        cam = PyCapture2.Camera()
        uid = bus.getCameraFromIndex(0)
        cam.connect(uid)
        cam.startCapture()
        this = 1
    else:
        cap = cv2.VideoCapture(input_vidpath)
        if cap.isOpened() == False:
            sys.exit(
                'Video file cannot be read! Please check input_vidpath to ensure it is correctly pointing to the video file'
            )
        else:
            print('Capture Opened')

    # Initialize time
    time0 = time.time()

    while (True):

        # Capture frame-by-frame
        n_inds = int(cd['n_inds'])
        time1 = time.time()

        if FLIR == True:
            ret = True
            ret, frame = capIm()
            frame = np.expand_dims(frame, 2)
            frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGR)
            this += 1
        else:
            ret, frame = cap.read()
            this = cap.get(1)

        if ret == True:

            #Resize & keep color frame, make BW frame
            frame = crop_frame(frame, r)
            cl_frame = cv2.resize(frame,
                                  None,
                                  fx=scaling,
                                  fy=scaling,
                                  interpolation=cv2.INTER_LINEAR)
            bw_frame = cv2.cvtColor(cl_frame, cv2.COLOR_BGR2GRAY)
            _, thresh = cv2.threshold(bw_frame, thresh_val, 255, 0)

            #+++++++++++++++++++++
            # Detect contours
            list_of_detections, num_valid_contours = detect_blobs(
                cl_frame, thresh, meas_last, meas_now, last_heads, colors)

            #Assign detections to Track object
            try:
                tracker.assign_detections(list_of_detections, n_inds)
            except NameError:
                if len(list_of_detections) == n_inds:
                    tracker = Tracker(n_inds, list_of_detections,
                                      process_frame_count)
                    tracker.assign_detections(list_of_detections, n_inds)

            meas_now = [[track.get_mr_centroid(),
                         track.get_mr_head()]
                        for track in tracker.list_of_tracks]
            cent_meas = [[meas[0][0], meas[0][1]] for meas in meas_now]
            head_meas = [[meas[1][0], meas[1][1]] for meas in meas_now]

            ifds, old_ifds = metrics.ifd([np.asarray(cm) for cm in cent_meas],
                                         old_ifds, 0.5, mm2pixel, n_inds)

            # Check to see if expected animal-count is present
            if num_valid_contours == n_inds:
                if process_frame_count == 0:
                    all_present = True
                roll_call += 1
                if roll_call > 150:
                    all_present = True
            else:
                if process_frame_count < 300:
                    roll_call = 0

            # Begin logging and metrics if all animals present
            if all_present == True:

                # Generate trace history
                history = dl.history(process_frame_count, history, cent_meas)

                # Drawing
                new_frame = draw_global_results(cl_frame,
                                                cent_meas,
                                                head_meas,
                                                colors,
                                                history,
                                                n_inds,
                                                traces=True,
                                                heading=True)

                # Pass dictionary to data panel for visualization
                info_dict = {
                    "frame_count": process_frame_count,
                    "fps_calc": fps_calc,
                    "logging": logging,
                    "ifds": float(ifds),
                    "recording": recording
                }  #,
                #"heading": angles}

                # Generate visualization from drawn frame and info
                vis = generate_info_panel(new_frame, info_dict, vis_shape)

                # Send light commands if doing an RT experiment
                if rt_ifd == True:
                    last_pulse_time, accum = ard.lights_IFD(
                        ser, last_pulse_time, accum, ifds, ifd_min, pulse_len,
                        ifd_time_thresh, rt_LED_color, rt_LED_intensity)
                if (rt_pp == True) and ((time.time() - time0) >= rt_pp_delay):
                    last_pulse_time = ard.lights_PP(ser, last_pulse_time,
                                                    pulse_len, rt_pp_delay,
                                                    rt_pp_period, rt_LED_color,
                                                    rt_LED_intensity)
            else:
                # If all animals not yet present, use placeholders
                new_frame = cl_frame
                info_dict = None
                vis = generate_info_panel(new_frame, info_dict, vis_shape)

            # # Show present frame. Suppress to improve realtime speed
            cv2.imshow("FlyRT", vis)
            # #
            # # Write to .avi
            # if (recording==True):
            # 	out.write(vis)

            # Write current measurement and calcs to CSV
            # 	if (logging == True) and (all_present==True):
            # 		logger.write_meas(process_frame_count, float(process_frame_count/30), pixel_meas[0:n_inds], ifd, angles)
            #

            # FPS Calcs
            process_frame_count += 1
            fps = True
            time2 = time.time()
            fps_calc = float(process_frame_count / (time2 - time0))

            # GUI Stop Tracking Button
            stop_bit = config.stop_bit
            if cv2.waitKey(1) & (stop_bit == True):
                break
        else:
            break

    # Clean up
    if FLIR == True:
        cam.stopCapture()
        cam.disconnect()
    else:
        cap.release()
    if logging == True:
        logger.close_writer()
    if (recording == True):
        out.release()
    cv2.destroyAllWindows()
    cv2.waitKey()

    return None
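
For reference, the csv logger used above (dl.Logger) is driven entirely through create_outfile, write_header, write_meas and close_writer. A minimal sketch of a logger exposing that interface; the output directory and column layout are assumptions, only the method names come from the code above:

import csv
import os
from datetime import datetime


class Logger:
    """Illustrative csv logger matching the calls made in FlyRT's run()."""

    def create_outfile(self):
        # Assumed output location; mirrors the movie naming scheme used above.
        os.makedirs('generated_data/logs', exist_ok=True)
        dt = datetime.now()
        path = 'generated_data/logs/{}_{}_{}_{}{}.csv'.format(
            dt.month, dt.day, dt.year, dt.hour, dt.minute)
        self._file = open(path, 'w', newline='')
        self._writer = csv.writer(self._file)

    def write_header(self, frame_count, n_inds, ifd=False, headings=False):
        header = ['frame', 'time_s']
        for i in range(n_inds):
            header += ['x_{}'.format(i), 'y_{}'.format(i)]
        if ifd:
            header.append('ifd_mm')
        if headings:
            header += ['heading_{}'.format(i) for i in range(n_inds)]
        self._writer.writerow(header)

    def write_meas(self, frame_count, time_s, positions, ifd, angles):
        row = [frame_count, time_s]
        for x, y in positions:
            row += [x, y]
        row.append(ifd)
        row += list(angles)
        self._writer.writerow(row)

    def close_writer(self):
        self._file.close()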
Code Example #8
File: data_threading.py    Project: kevin2kc/kongo
import datetime
import sys

import data_logger

import dataconnection
import data_ts_stock_daily_to_mongodb
import data_ts_finance_indicator_to_mongodb
import data_ts_finance_report_to_mongodb
import basicdata_ts_trade_cal_to_mongodb
import data_ts_daily_basic_to_mongodb
import basicdata_ts_stock_basic_to_mongodb
import basicdata_ts_index_basic_to_mongodb
import prefinal_data_ts_stock_daily_to_mongodb

if __name__ == '__main__':

    tsname = datetime.datetime.now().date().strftime("%Y%m%d")
    sys.stdout = data_logger.Logger("log/" + tsname + ".log", sys.stdout)
    sys.stderr = data_logger.Logger("log/" + tsname + "err.log", sys.stderr)

    print('--- kongo: starting data download ---')
    t_start = datetime.datetime.now()
    print("kongo start time: {0}".format(str(t_start)))

    # Connect to MongoDB
    db = dataconnection.Connection().getmongoconnection()

    # Connect to tushare
    pro = dataconnection.Connection().gettushareconnection()

    # Basic stock information
    basicdata_ts_stock_basic_to_mongodb.runallstock(pro, db, "stock_basic")
    # Basic index information
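
In this project data_logger.Logger is used as a stream tee: sys.stdout and sys.stderr keep printing to the console while everything is also appended to a dated log file. A minimal sketch of a Logger with that behaviour; the project's actual implementation is not shown here:

import sys


class Logger:
    """Tee-style stream wrapper: writes to both a log file and the wrapped stream."""

    def __init__(self, filename, stream=sys.stdout):
        self.stream = stream
        self.log = open(filename, 'a', encoding='utf-8')

    def write(self, message):
        self.stream.write(message)
        self.log.write(message)

    def flush(self):
        self.stream.flush()
        self.log.flush()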
Code Example #9
    #
    # Check that we have the correct arguments
    #
    if len(sys.argv) != 2:
        usage()

    # Grab the URL to use
    [progname, rvi_url] = sys.argv

    # Welcome message
    print "RVI Big Data Device"
    print "Outbound URL to RVI:  ", rvi_url
    print "Inbound URL from RVI: ", service_url

    # Set up the logger.
    logger = data_logger.Logger()
    logger.start()

    # Setup outbound JSON-RPC connection to the RVI Service Edge
    rvi_server = jsonrpclib.Server(rvi_url)
    print "SERVER:", rvi_server

    # Set up AMB DBUS integration
    amb = amb_dbus.DBUSMonitor(logger)
    amb.start()

    # Setup inbound JSON-RPC server
    rvi_callback = RVICallbackServer(amb, logger, rvi_server, service_url)
    rvi_callback.start()

    # Setup data sender