def data(logged_data):
    """Render a logged data file as a time/value series.

    :param logged_data: path of the log file to display (appears to come
        from the route/caller — NOTE(review): if user-controlled, both the
        path and the directory-listing fallback below are potential
        information-disclosure issues; confirm upstream validation).
    :return: rendered ``data.html`` page, or a ``<br/>``-joined listing of
        the current directory when the file does not exist.
    """
    if not os.path.exists(logged_data):
        # Missing file: fall back to listing the working directory.
        return '<br/>'.join(os.listdir('.'))
    rows = DataLogger(logged_data).read()
    # Rebase all timestamps so the first sample is at t=0.
    t0 = float(rows[0][0])
    samples = []
    for stamp, value in rows:
        samples.append([float(stamp) - t0, float(value)])
    return render_template('data.html', data=samples, data_filename=logged_data)
def __init__(self):
    """Wire up the runtime: in-memory log, trace writer, serial reader,
    filesystem logger, and the JSON-RPC handler back-reference.

    All configuration is read from the module-level ``settings`` mapping.
    """
    # Bounded in-memory ring buffer of recent entries.
    self.ramlog = collections.deque(maxlen=settings['ramlog']['length'])
    # Cursors for the filesystem/RAM logs (semantics defined by their consumers).
    self.fslog_last = 0
    self.ramlog_last = 0
    # avoid no data received error right at the beginning
    self.commdead_last = time.time()
    strace = settings['trace']
    self.trace = ProzedaTrace(strace['dir'] + strace['prefix'], strace['suffix'])
    # Apply class-level configuration before the reader is constructed.
    ProzedaLogdata.set_config(settings['system'])
    ProzedaHistory.set_config(settings)
    sserial = settings['serialport']
    serialport = serial.Serial(sserial['port'], sserial['baudrate'])
    self.prozeda = ProzedaReader(serialport)
    sfslog = settings['fslog']
    self.fslog = DataLogger(sfslog['dir'] + sfslog['prefix'], sfslog['suffix'])
    # Forward every raw frame received by the reader to the trace writer.
    self.prozeda.evt_rawdata_received = self.trace.data_received
    # Expose this instance to the JSON-RPC request handler via a class attribute.
    JsonrpcRequestHandler.pirozeda = self
def __init__(self):
    """Construct the application's core components."""
    self.config = Config()
    self.mcu = MCU()
    self.detecting = Detecting()
    # fb = Firebase()
    # References to these two are not retained — presumably constructed for
    # their side effects (TODO confirm against the rest of the file).
    web_server = webserver.WebServer()
    data_logger = DataLogger()
def webControl(x, y, port="/dev/tty.usbmodemfa131", syringe_slot=9):
    """Home the EvoBot and move its head to the given coordinates.

    Generalized: the serial device and syringe slot were hard-coded; they are
    now keyword parameters whose defaults preserve the original behavior.

    :param x: target x coordinate (coerced to int)
    :param y: target y coordinate (coerced to int)
    :param port: serial device of the robot controller
    :param syringe_slot: slot index the syringe is mounted in
    """
    x = int(x)
    y = int(y)
    usrMsgLogger = DataLogger()
    evobot = EvoBot(port, usrMsgLogger)
    head = Head(evobot)
    syringe = Syringe(evobot, syringe_slot)
    # Conversion factor of 1: plunger units map 1:1 (original hard-coded value).
    syringe.plungerSetConversion(1)
    evobot.home()
    head.move(x, y)
def record_logs(self, user_id, success=False):
    """
    Special callback added to this callback plugin
    Called by Runner object
    :param user_id: identifier of the user whose run is being persisted
    :param success: whether the run completed successfully
    :return: whatever DataLogger.save_log returns
    """
    storage = DataLogger()
    return storage.save_log(
        user_id,
        self.logger.log,
        self.logger.runtime,
        success,
    )
def start(self, duration, comment=None):
    """Begin (or extend) a logging session.

    Fix: compare to ``None`` with ``is`` instead of ``==`` (PEP 8; ``==``
    invokes ``__eq__`` and can misbehave with custom types).

    :param duration: seconds until ``stoptime``; ``None`` is treated as 0.
    :param comment: optional comment written to the log as a 'c' record.
    """
    if self.starttime is None:
        # First call: remember when logging began.
        self.starttime = time.time()
    if duration is None:
        duration = 0
    self.stoptime = time.time() + int(duration)
    if self.logger is None:
        # Lazily create the logger with a timestamped filename.
        self.logger = DataLogger(self.fileprefix, self.filesuffix, '%Y-%m-%d_%H.%M.%S', True)
    if comment is not None and self.logger is not None:
        self.logger.write(None, 'c', comment)
def loop(client, account_currency, payment_method_currency, transfers):
    """Poll the exchange forever: log the spot price each cycle and sell the
    whole wallet balance once its fiat value exceeds ``sell_target``.

    :param client: wrapper exposing the exchange API as ``client.client``
    :param account_currency: currency of the account (passed to MyExRates)
    :param payment_method_currency: quote currency (passed to MyExRates)
    :param transfers: object holding ``wallet`` and ``payment_method`` ids
    """
    sell_target = 141.00  # fiat value at which the balance is sold
    quote_sell_at = 0.95  # Ratio of sell price when quotes will be obtained (0.00 to 1.00)
    exRates = MyExRates(client.client, account_currency, payment_method_currency)
    # Logger is seeded with the initial rate snapshot to fix the CSV columns.
    data_logger = DataLogger(exRates.dict, 'pricelog.csv')
    while True:
        # Refresh rates and balance every cycle.
        exRates = MyExRates(client.client, account_currency, payment_method_currency)
        account = client.client.get_account(transfers.wallet)
        spot_value = exRates.spot_price * float(account['balance']['amount'])
        print()
        print('Account Balance: ' + str(account['balance']['amount']) + ' ' + account['balance']['currency'])
        print('Spot Price: ' + str(exRates.spot_price))
        print('Spot Value: ' + str(spot_value))
        print('Spot value at ' + str("%.2f" % (spot_value / sell_target * 100)) + '% of target (' + str(sell_target) + ').')
        # Within quote_sell_at of the target: request a quote first, and only
        # sell for real if the quoted total actually beats the target.
        if spot_value > sell_target * quote_sell_at:
            quote = client.client.sell(transfers.wallet, amount=str(account['balance']['amount']), currency='BTC', payment_method=transfers.payment_method, quote=True)
            print('Spot price within ' + str(quote_sell_at * 100) + '% of target - Getting quote')
            if float(quote['total']['amount']) > sell_target:
                print('Attempting Sell')
                sell = client.client.sell(transfers.wallet, amount=str(account['balance']['amount']), currency=account['balance']['currency'], payment_method=transfers.payment_method, quote=False)
                print('Sold ' + sell['total']['amount'])
            else:
                print('Quote of ' + quote['total']['amount'] + ' too low - No sell')
        # NOTE(review): indentation reconstructed — assumed the log/sleep run
        # every iteration, not only on the quote branch; confirm.
        data_logger.add_line(exRates.dict)
        time.sleep(10)
def main():
    """Entry point (Python 2): read the pakbus config file and ring/greet
    every configured datalogger node."""
    # Parse command line arguments
    parser = optparse.OptionParser()
    parser.add_option('-c', '--config', help = 'read configuration from FILE [default: %default]', metavar = 'FILE', default = 'campbell.conf')
    (options, args) = parser.parse_args()
    # Read configuration file
    cf = ConfigParser.SafeConfigParser()
    # cf.read() returns the list of files successfully parsed.
    print 'configuration read from %s' % cf.read(options.config)
    for pakbus_id in cf.get('pakbus', 'dataloggers').split(','):
        # base=0 accepts decimal, hex (0x..) and octal notations.
        pakbus_id = int(pakbus_id, base = 0)
        dl = DataLogger(cf.get('pakbus', 'host'), pakbus_id, int(cf.get('pakbus', 'my_node_id'), base = 0), cf.getint('pakbus', 'timeout'))
        print "ringing node {}...".format(pakbus_id)
        dl.ring()
        dl.hello()
# Set up logging if config.getboolean('monitor', 'debug', fallback=False): print("Debug enabled") level = logging.DEBUG else: level = logging.INFO duallog.setup('solar-monitor', minLevel=level, fileLevel=level, rotation='daily', keep=30) # Set up data logging # datalogger = None datalogger = DataLogger(config) # Set up device manager and adapter device_manager = SolarDeviceManager(adapter_name=config['monitor']['adapter']) logging.info("Adapter status - Powered: {}".format( device_manager.is_adapter_powered)) if not device_manager.is_adapter_powered: logging.info("Powering on the adapter ...") device_manager.is_adapter_powered = True logging.info("Powered on") # Run discovery device_manager.update_devices() logging.info("Starting discovery...") # scan all the advertisements from the services list device_manager.start_discovery()
def main(): args = vars(ap.parse_args()) # create frame counter fps_counter = FPSCounter() # total number of blinks TOTAL = 0 # initialize dlib's face detector (HOG-based) and then create # the facial landmark predictor print("[INFO] loading facial landmark predictor...") detector = dlib.get_frontal_face_detector() predictor = dlib.shape_predictor(args["shape_predictor"]) # grab the indexes of the facial landmarks for the left and # right eye, respectively (lStart, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eye"] (rStart, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eye"] # start the video stream thread print("[INFO] starting video stream thread...") print("[INFO] print q to quit...") if args['video'] == "camera": vs = VideoStream(src=0).start() vs.stream.set(cv2.CAP_PROP_FPS, 15) fileStream = False else: vs = FileVideoStream(args["video"]).start() fileStream = True fps = vs.stream.get(cv2.CAP_PROP_FPS) # create dataloggers datalogger = DataLogger(columns=['ear', 'adr']) # blink detector blink_detector = BlinkDetector(time_window=5, plot=args['graph'], frame_delay=10) # loop over frames from the video stream frame_cnt = 0 INIT_TIME = None while True: # if this is a file video stream, then we need to check if # there any more frames left in the buffer to process if fileStream and not vs.more(): break # get timestamp if fileStream: timestamp = frame_cnt / fps else: if INIT_TIME is None: INIT_TIME = time.time() timestamp = time.time() - INIT_TIME fps = fps_counter.tick() # get the new frame frame = vs.read() frame_cnt += 1 if frame is None: break frame = imutils.resize(frame, width=450) # it, and convert it to grayscale channels) gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) # detect faces in the grayscale frame rects = detector(gray, 0) # loop over the face detections for rect in rects: # determine the facial landmarks for the face region, then # convert the facial landmark (x, y)-coordinates to a NumPy # array shape = predictor(gray, rect) shape = 
face_utils.shape_to_np(shape) # extract the left and right eye coordinates, then use the # coordinates to compute the eye aspect ratio for both eyes leftEye = shape[lStart:lEnd] rightEye = shape[rStart:rEnd] leftEAR = eye_aspect_ratio(leftEye) rightEAR = eye_aspect_ratio(rightEye) # compute the area-over-distance metric adr = AreaDistanceRatio.compute(leftEye, rightEye) # log ADR datalogger.log(adr, 'adr', timestamp) # average the eye aspect ratio together for both eyes ear = (leftEAR + rightEAR) / 2.0 # log EAR datalogger.log(ear, 'ear', timestamp) # compute the convex hull for the left and right eye, then # visualize each of the eyes leftEyeHull = cv2.convexHull(leftEye) rightEyeHull = cv2.convexHull(rightEye) cv2.drawContours(frame, [leftEyeHull], -1, (0, 255, 0), 1) cv2.drawContours(frame, [rightEyeHull], -1, (0, 255, 0), 1) # send new data to blink detector and check if it detected new blinks blink_detector.send(adr, timestamp) blink = blink_detector.get_blink() if blink is not None: blink_time, blink_dur = blink TOTAL += 1 print(f"[BLINK] time: {blink_time:.2f} dur: {blink_dur:.2f}") # draw the total number of blinks on the frame along with # the computed eye aspect ratio for the frame cv2.putText(frame, "Blinks: {}".format(TOTAL), (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2) cv2.putText(frame, "ADR: {:.2f}".format(ear), (300, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2) cv2.putText(frame, "FPS: {:.2f}".format(fps), (300, 60), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2) # show the frame cv2.imshow("Frame", frame) key = cv2.waitKey(1) & 0xFF # if the `q` key was pressed, break from the loop if key == ord("q"): break # save datafile output_file = args['output_file'] if output_file == 'ask': output_file = input("Enter filename to save: ") if output_file is not None: datalogger.save(output_file) # do a bit of cleanup cv2.destroyAllWindows() vs.stop()
    # (tail of a function whose definition starts above this chunk —
    # indentation reconstructed; confirm against the full file)
    astro_status = True
    return astro_status

# main program
sense = SenseHat()
# set correct rotation for astropi
sense.set_rotation(270)
# initialize text strings
tekster.init_tekst()
# initialize datalogger
logger = DataLogger()
# read baseline humidity until less than 1% variation
baseline = baseline_humidity()
last_baseline = datetime.now()
# set timedelta between each new baseline
time_between = timedelta(minutes=10)
while True:
    # find astronaut
    astronaut_status = find_astronaut(baseline)
    # write to the datalog
    logger.log_data(astronaut_status)
from pandas import ExcelWriter # add path to import functions and classes (absolute path on the FPA's computer) sys.path.insert( 1, r"C:\\Users\\FireLab\\Desktop\\Simon\\FeedbackControl_MassExperiments\\classes_and_functions" ) from datalogger import DataLogger ##### # CONNECT TO LOAD CELL AND DATA LOGGER AND INSTANTIATE CLASSES ##### # create instance of the data logger and check connection print("\nConnection to data logger") rm, logger = DataLogger().new_instrument() ##### # CALIBRATE THE LAMPS. ##### # define constants lamp_voltage_limit = 4.5 # define arrays used for polynomial fitting hf_gauge_factor = 0.0001017 # V/kW/m2 nmbr_readings_pervoltage = 20 output_voltages = np.linspace(0, lamp_voltage_limit, 20) all_output_voltages = np.zeros(nmbr_readings_pervoltage * len(output_voltages) * 2) all_input_voltages = np.zeros_like(all_output_voltages)
# NOTE(review): this chunk begins mid-statement — the `if`/`try` that opens
# these branches lies above the visible region; indentation reconstructed.
        opts.video = int(opts.video)
    except ValueError:
        pass  # not an integer device index; keep opts.video as given
    frmbuf = VideoBuffer(opts.video, opts.start, opts.stop, historysize=LAST_DAY + 1, loop=opts.loop)
else:
    # No video source: subscribe to the ROS camera topic instead.
    frmbuf = ROSCamBuffer(opts.camtopic + "/image_raw", historysize=LAST_DAY + 1, buffersize=30)

# start the node and control loop
rospy.init_node("flownav", anonymous=False)
datalog = DataLogger() if opts.publish else None
kbctrl = None
if opts.camtopic == "/ardrone" and not opts.video:
    # Live drone feed: enable keyboard control.
    kbctrl = KeyboardController(max_speed=0.5, cmd_period=100)
if kbctrl:
    FlatTrim = rospy.ServiceProxy("/ardrone/flattrim", Empty())
    Calibrate = rospy.ServiceProxy("/ardrone/imu_recalib", Empty())

# Create display windows and hand their names to the matcher module.
gmain_win = frmbuf.name
cv2.namedWindow(gmain_win, flags=cv2.WINDOW_OPENGL | cv2.WINDOW_NORMAL)
if opts.showmatches:
    cv2.namedWindow(gtemplate_win, flags=cv2.WINDOW_OPENGL | cv2.WINDOW_NORMAL)
smatch.MAIN_WIN = gmain_win
smatch.TEMPLATE_WIN = gtemplate_win
def run(self):
    """Main thermostat loop: read sensors, merge MQTT inputs, publish
    derived values, drive the heater, and clean everything up on exit.

    Runs forever; the `finally` block closes all resources when the loop
    is interrupted (e.g. by an exception or KeyboardInterrupt).
    """
    sensors = None
    datalogger = None
    try:
        # initialise objects
        sensors = Sensors(self.T_OFFSET,self.P_OFFSET,self.H_OFFSET)
        datalogger = DataLogger(self.DATALOGFILE, self.DATALOGKEYS, self.DATALOGMAXFILESIZE)
        self.scheduler = HeaterScheduler()
        self.modemanager = ModeManager( scheduler=self.scheduler, callbackfunc=self.modemanagercallbackfunc)
        self.mqttclient = MqttClient(subscribelist=self.SUBSCRIBELIST)
        self.heatercontroller = HeaterController()
        # NOTE(review): if construction above raises before these attributes
        # are set, the `finally` block's `self.scheduler`/... reads will
        # raise AttributeError — confirm attributes are pre-declared in __init__.
        # initialise state
        self.modemanager.setMode(self.modemanager.AUTO)
        # initial data
        t_avg,p_avg,h_avg = sensors.getData()
        # present ourselves
        self.mqttclient.publish({topics.IOTHERMSTATUS:'Active'})
        self.mqttclient.publish({topics.IOTHERMVERSION:__version__ +' '+ __status__})
        self.mqttclient.publish({topics.MODE:self.modemanager.currentmode.name})
        self.mqttclient.publish({topics.TARGETTEMPERATURE:self.modemanager.currentmode.targettemperature})
        t = 0
        while True:
            t_,p,h = sensors.getData()
            messages = self.mqttclient.getData()
            # use temperature value from mqtt
            if self.USE_OTHERTEMPERATURE and topics.OTHERTEMPERATURE in messages:
                t = float( messages[topics.OTHERTEMPERATURE] )
            if not self.USE_OTHERTEMPERATURE:
                t = t_
            # calculate averages (exponential smoothing with factor 1/2)
            t_avg = (t_avg + t)/2
            p_avg = (p_avg + p)/2
            h_avg = (h_avg + h)/2
            # calculate derivatives (change vs. smoothed value per refresh period)
            d_t = (t - t_avg)/self.REFRESH_SECONDS
            d_p = (p - p_avg)/self.REFRESH_SECONDS
            d_h = (h - h_avg)/self.REFRESH_SECONDS
            # process data from subscribed topics
            self.processMessages( messages )
            # prepare for publishing
            messages = self.prepareMessages(t, p, h, d_t, d_p, d_h)
            datalogger.log(messages)
            self.mqttclient.publish(messages)
            # update the heatercontroller with the current and target temperatures
            #print('targettemperature = {}'.format(self.modemanager.currentmode.targettemperature))
            self.heatercontroller.update(t,self.modemanager.currentmode.targettemperature)
            sleep(self.REFRESH_SECONDS)
    finally:
        # Best-effort shutdown of every resource that was created.
        print('IOThermostat: Stopping IOThermostat..')
        if datalogger:
            datalogger.close()
        if sensors:
            sensors.close()
        if self.scheduler:
            self.scheduler.close()
        if self.heatercontroller:
            self.heatercontroller.close()
        if self.mqttclient:
            self.mqttclient.close()
import sys
from qtpy import QtWidgets
import os.path as osp

# Prepend the package's parent directory so `datalogger` can be imported
# even when it is not on the normal PYTHONPATH.
dir_path = osp.split(osp.dirname(osp.realpath(__file__)))[0]
sys.path.append(dir_path)  # In case datalogger not
                           # accessible in normal PYTHONPATH

if __name__ == '__main__':
    # First CLI argument is the data directory to open.
    directory = sys.argv[1]
    APP = QtWidgets.QApplication(sys.argv)
    # Imported here, after the path fix-up above.
    from datalogger import DataLogger
    DLG = DataLogger(directory)
    APP.exec_()
def main():
    """CPR wrist-tracker entry point (Python 2): track a colored marker in a
    webcam feed, analyze compression rate/depth/recoil, log the session, and
    optionally send status codes to a trainer device.

    CLI: runtracker.py participant_id [save_location]
    """
    if len(sys.argv) < 2:
        print 'USAGE: runtracker.py participant_id [save_location]'
        return
    S = MARKER_SIZE
    F = 1
    participant_id = sys.argv[1]
    save_location = DATA_DIR
    if len(sys.argv) >= 3:
        save_location = sys.argv[2]

    # Mouse callback for setting origin point
    def mouse_callback(event, x, y, flags, param):
        # Origin may only be moved while not recording.
        if datalog.is_running():
            return
        if event == cv2.EVENT_LBUTTONDOWN:
            # Set new origin_y
            tracker.set_origin(y)

    # Open webcam
    cap = cv2.VideoCapture(TRACKER_CAM)
    if not cap.isOpened():
        print 'Error opening tracker camera!'
        return
    # Open facecam (for separate recording)
    facecap = None
    if FACE_CAM is not None:
        facecap = cv2.VideoCapture(FACE_CAM)
        if not facecap.isOpened():
            print 'Error opening face camera!'
            facecap = None
    # Get video parameters (try to retain same attributes for output video)
    width = float(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = float(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = float(cap.get(cv2.CAP_PROP_FPS))
    # Create instances
    track_finder = mk.MarkerFinder(GREEN_COLOR_MIN, GREEN_COLOR_MAX)
    tracker = WristTracker(track_finder, S, F)
    tracker.set_origin(int(height / 2))
    cprstatus = CPRStatus(CPR_BUFFER_SIZE)
    statussender = StatusSender(SOCK_ADDR, SOCK_PORT)
    # Fall back to 30 fps when the camera reports an implausible value.
    datalog = DataLogger(fps if (0 < fps <= 60) else 30, width, height,
                         save_location, facecap is not None)
    trainer_on = True
    cur_position = 0
    cur_size = 0
    last_rate, last_depth, last_recoil, last_code = 0, 0, 0, 0
    while True:
        # Get frame
        ret, frame = cap.read()
        if ret is False or frame is None:
            break
        faceframe = None
        if facecap:
            faceret, faceframe = facecap.read()
        # Get dimensions
        h, w, _ = frame.shape
        center = (w / 2, h / 2)
        ''' Output Drawing '''
        # Make output image
        output = frame.copy()
        # Display program status
        cv2.putText(
            output, '<NOT RUNNING>' if not datalog.is_running() else
            'RECORDING ' + datalog.get_filename() + ' [' + str(datalog.get_index()) + ']',
            (0, 30), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 0, 255), 2)
        cv2.putText(
            output, 'R: ' + str(last_rate) + ' D: ' + str(last_depth) + ' C: '
            + str(last_recoil) + ' S: ' + str(last_code), (0, 60),
            cv2.FONT_HERSHEY_PLAIN, 1, (0, 0, 255), 2)
        # Draw center and origin lines
        cv2.line(output, (center[0], 0), (center[0], h), (0, 0, 255), 1)
        cv2.line(output, (center[0] - 20, int(tracker.get_origin())),
                 (center[0] + 20, int(tracker.get_origin())), (0, 0, 255), 1)
        # Draw instructions
        if not datalog.is_running():
            cv2.putText(
                output, 'c - set clr at orig | t - trainer is ' +
                ('on' if trainer_on else 'off') + ' | space - tgl tracker',
                (0, h - 4), cv2.FONT_HERSHEY_PLAIN, 1, (255, 0, 0), 1)
        ''' Tracking '''
        # Get tracked marker from image
        tracked_marker = tracker.get_marker(frame, output)
        if tracked_marker:
            draw_marker(output, tracked_marker.marker, tracked_marker.size,
                        tracked_marker.distance, tracked_marker.position)
            # Update cur position/size (for calibration)
            cur_position = tracked_marker.y
            cur_size = tracked_marker.size
        ''' Analysis '''
        if datalog.is_running():
            if tracked_marker:
                # Analyze CPR status
                rate, depth, recoil, code = cprstatus.update(tracked_marker)
                if code is not None:
                    last_rate, last_depth, last_recoil, last_code = rate, depth, recoil, code
                    # Update
                    print 'R: ' + str(last_rate) + ' D: ' + str(
                        last_depth) + ' C: ' + str(last_recoil) + ' S: ' + str(
                            last_code)
                    # Send status if trainer is turned on
                    if trainer_on:
                        if not (last_rate == 0 and last_depth == 0 and last_recoil == 0):
                            print 'send status ' + str(code)
                            statussender.send_status(code)
            # NOTE(review): indentation reconstructed — the ternary on
            # tracked_marker suggests this log call runs even without a
            # marker, but rate/depth/recoil/code are only bound once a
            # marker has been seen; confirm against the original file.
            datalog.log(frame, output, tracker.get_origin(),
                        tracked_marker.position if tracked_marker else 0,
                        rate, depth, recoil, code, faceframe)
        ''' Show Output '''
        # Resize frame
        #output = cv2.resize(output, (OUT_WIDTH, OUT_HEIGHT))
        # Show frame
        #cv2.imshow('Frame', frame)
        cv2.imshow('Output', output)
        cv2.setMouseCallback('Output', mouse_callback)
        if faceframe is not None:
            cv2.imshow('Face', faceframe)
        ''' Process Keypresses '''
        k = cv2.waitKey(1)
        if k == 27:
            # ESC: stop recording and quit.
            datalog.stop()
            break
        elif k == ord('v'):
            track_finder.set_color(VIOLET_COLOR_MIN, VIOLET_COLOR_MAX)
        elif k == ord('g'):
            track_finder.set_color(GREEN_COLOR_MIN, GREEN_COLOR_MAX)
        elif k == ord('y'):
            track_finder.set_color(YELLOW_COLOR_MIN, YELLOW_COLOR_MAX)
        elif k == ord('c'):
            # Calibrate tracker (color)
            color = (0, 0, 0)  # NOTE(review): appears unused
            # Get origin pixel color
            pixel = frame[int(tracker.get_origin())][center[0]]
            hsv = cvt_bgr2hsv(pixel)
            print 'Calibrate color: ' + str(hsv) + ' | BGR: ' + str(pixel)
            # Apply HSV range
            '''
            lower_bound = tuple(map(lambda x, y: max(0, x - y), hsv, COLOR_VARIANCE))
            upper_bound = tuple(map(lambda x, y: min(255, x + y), hsv, COLOR_VARIANCE))
            '''
            lower_bound = (max(0, hsv[0] - COLOR_MIN[0]), COLOR_MIN[1], COLOR_MIN[2])
            upper_bound = (min(255, hsv[0] + COLOR_MAX[0]), COLOR_MAX[1], COLOR_MAX[2])
            #'''
            track_finder.set_color(lower_bound, upper_bound)
            print 'Color range: ' + str(lower_bound) + ' to ' + str(
                upper_bound)
        elif k == ord('t'):
            # Toggle trainer
            trainer_on = not trainer_on
        elif k == 32:
            # Toggle on/off
            if datalog.is_running():
                datalog.stop()
            else:
                # Reset logger
                cur_time = datetime.now()
                time_str = cur_time.strftime('%m-%d-%y_%H%M%S')
                datalog.start('CPR_' + str(participant_id) + '_' + time_str)
                cprstatus.reset()
                # Set tracker origin/size
                tracker.set_origin(cur_position)
                tracker.set_force_px_size(cur_size)
    cap.release()
# Set up pressure pump switch GUI pressure_pump_switch_gui = PressurePumpUim(pressure_pump_switch, main_window_ui) # Set up picarro code GUI picarro_code_gui = PicarroCodeUim(picarro_code, main_window_ui) # Set up state GUI state_gui = StateUim(state, main_window_ui, state_ui) # Set up maintenanceTools GUI maintenancetools_gui = MaintenanceToolsUim(maintenancetools, maintenancetools_ui) # Set up Debug log GUI debuglog_gui = DebuglogUim(debug_logger, debuglog_ui, main_window.debuglog_window) ######################################################################################################################## # DATA LOGGER ######################################################################################################################## data_logger = DataLogger("data", humgen) ######################################################################################################################## # LAUNCH APPLICATION ######################################################################################################################## main_window.show() sys.exit(app.exec_())
def index():
    """Serve the landing page, rotating the module-level data logger to a
    fresh timestamped CSV file on every request."""
    global datalogger
    log_name = 'data_%d.csv' % int(time.time())
    datalogger = DataLogger(log_name)
    return render_template('index.html')