def processZ(Z1, Z2, startZ, ctime, etime):
    # Average the two redundant Z channels, dropping samples where they disagree
    # by more than the glitch threshold.
    Z = []
    for i in range(len(Z1)):
        if abs(Z1[i] - Z2[i]) > 60000:
            Z.append(0)
        else:
            Z.append((Z1[i] + Z2[i]) / 2)
    prevZ = startZ % 16.0
    trueZ = startZ
    trueZVect = []
    for i in range(len(Z)):
        curZ = Z[i] * (10.0 / 40961.0)  # convert counts to position units
        if abs(prevZ - curZ) > 7:
            # the reading wrapped around its range (Zthresh is defined at module scope)
            if prevZ < curZ:
                print("wrapped, prevZ<curZ " + str(time_now()), flush=True)
                trueZ = trueZ - (Zthresh - curZ + prevZ)
            else:
                print("Wrapped, prevZ>curZ " + str(time_now()), flush=True)
                trueZ = trueZ + (Zthresh - prevZ + curZ)
            print(Z1[i])
            print(Z2[i])
            print(prevZ)
            print(curZ)
            print(ctime)
            print(etime)
        else:
            trueZ = trueZ + (curZ - prevZ)
        trueZVect.append(trueZ)
        prevZ = curZ
    if len(trueZVect) == 0:
        trueZVect = [trueZ]
    return [trueZVect, trueZVect[-1]]
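# A minimal, standalone sketch of the wrap-around handling used in processZ above.
# Illustrative only: the span and jump threshold here are assumptions, not the
# module's actual constants (Zthresh and the scale factor are defined elsewhere).
def unwrap_positions(raw, span=16.0, jump_threshold=7.0, start=0.0):
    """Accumulate an absolute position from a reading that wraps every `span` units."""
    prev = start % span
    true_pos = start
    out = []
    for cur in raw:
        delta = cur - prev
        if abs(delta) > jump_threshold:
            # the reading wrapped: take the short way around instead
            delta -= span if delta > 0 else -span
        true_pos += delta
        out.append(true_pos)
        prev = cur
    return out

# a reading that climbs past the wrap point keeps increasing monotonically
print(unwrap_positions([14.0, 15.5, 1.0, 2.5], start=14.0))  # [14.0, 15.5, 17.0, 18.5]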
async def run(self, parsed_args, output):
    # produce output four times per second;
    # figure out how much output will be in each block
    rate = parsed_args.rate
    width = parsed_args.width
    data_ts = utilities.time_now()
    data_ts_inc = 1 / rate * 1e6
    wait_time = 1 / OUTPUT_RATE
    BLOCK_SIZE = rate / OUTPUT_RATE
    fraction_remaining = 0
    i = 0
    # print("Starting random stream: %d elements @ %0.1fHz" % (width, rate))
    while not self.stop_requested:
        float_block_size = BLOCK_SIZE + fraction_remaining
        int_block_size = int(np.floor(float_block_size))
        fraction_remaining = float_block_size - int_block_size
        data = np.random.rand(int_block_size, width)
        self.avg = np.average(data)
        self.stddev = np.std(data)
        top_ts = data_ts + int_block_size * data_ts_inc
        ts = np.array(np.linspace(data_ts, top_ts, int_block_size, endpoint=False),
                      dtype=np.uint64)
        data_ts = top_ts
        await output.write(np.hstack((ts[:, None], data)))
        await asyncio.sleep(wait_time)
        i += 1
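# Standalone illustration of the fractional block-size bookkeeping above: carrying the
# remainder forward keeps the long-run sample count equal to rate * elapsed time even
# when rate / OUTPUT_RATE is not an integer. Names and values here are illustrative.
import numpy as np

def block_sizes(rate, output_rate, n_blocks):
    nominal = rate / output_rate       # samples per block, possibly fractional
    remainder = 0.0
    sizes = []
    for _ in range(n_blocks):
        exact = nominal + remainder
        size = int(np.floor(exact))    # emit whole samples only
        remainder = exact - size       # carry the fraction into the next block
        sizes.append(size)
    return sizes

# e.g. a 110 Hz stream written 4 times per second: blocks of 27 or 28 samples
sizes = block_sizes(110, 4, 8)
print(sizes, sum(sizes))  # 8 blocks over 2 seconds total exactly 220 samples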
async def test_data_write(self):
    await self.node.data_delete("/archive/data1")
    pipe = await self.node.data_write("/archive/data1")
    blk_size = 100
    nblks = 1000
    time = utilities.time_now()
    for i in range(nblks):
        # timestamps spaced exactly 1 us apart within each block
        ts = np.linspace(time, time + blk_size, blk_size, endpoint=False)
        self.assertEqual(min(np.diff(ts)), 1)
        data1 = np.random.random((blk_size, 1))
        data2 = np.random.random((blk_size, 1))
        res = np.hstack((ts[:, None], data1, data2))
        time = time + blk_size + 1
        await pipe.write(res)
    await pipe.close()
    # now read the data back
    pipe = await self.node.data_read("/archive/data1", start=0)
    nrows = 0
    while not pipe.is_empty():
        data = await pipe.read()
        nrows += len(data)
        pipe.consume(len(data))
    self.assertEqual(blk_size * nblks, nrows)
async def run(self, parsed_args, output):
    count = 0
    while not self.stop_requested:
        await output.write(np.array([[time_now(), count]]))
        await asyncio.sleep(0.1)
        count += parsed_args.step
async def _get_current_weather(self):
    # read the last hour of the weather stream and return the most recent row
    pipe = await self.node.data_read(self.weather_stream,
                                     start=time_now() - 60 * 60 * 1e6)
    try:
        data = await pipe.read()
        pipe.consume(len(data))
    except EmptyPipe:
        return []
    finally:
        await pipe.close()
    return data['data'][-1]
async def ambient_weather(request):
    pipe = request.app["pipe"]
    keys = ["tempf", "humidity", "hourlyrainin", "baromrelin",
            "windspeedmph", "windgustmph", "winddir",
            "solarradiation", "uv"]
    data = [time_now()]
    for key in keys:
        data.append(float(request.query[key]))
    await pipe.write(np.array([data]))
    return web.Response(text="OK")
async def run(self, parsed_args, output):
    with open(parsed_args.file, 'r') as f:
        for line in f:
            data = np.fromstring(line, dtype=float, sep=parsed_args.delimiter)
            if parsed_args.timestamp:
                data = np.insert(data, 0, utilities.time_now())
            await output.write(np.array([data]))
            # TODO: is this sleep necessary?
            await asyncio.sleep(0.1)
            if self.stop_requested:
                break
async def run(self, parsed_args, output):
    while True:
        await asyncio.sleep(0.5)
        self.gps.update()
        if not self.gps.has_fix:
            print('Waiting for fix...')
            await asyncio.sleep(1)
            continue
        await output.write(np.array([[time_now(),
                                      self.gps.latitude,
                                      self.gps.longitude]]))
        print("data")
async def run(self, parsed_args, output):
    data = 100 * np.sin(np.arange(0, 2 * np.pi, 2 * np.pi / rows))
    data.shape = (rows, 1)
    ts_inc = 1 / rows * (1 / freq) * 1e6  # microseconds
    data_ts = time_now()
    while not self.stop_requested:
        top_ts = data_ts + 100 * ts_inc
        ts = np.array(np.linspace(data_ts, top_ts, rows, endpoint=False),
                      dtype=np.uint64)
        ts.shape = (rows, 1)
        ts_data = np.hstack((ts, data))
        await output.write(ts_data)
        data_ts = top_ts
        await asyncio.sleep(1 / freq)
    raise ValueError("Intentional Exception")
async def run(self, parsed_args, output):
    start_ts = time_now()
    # produce one second of data per iteration
    period = 1
    samples_per_period = int(np.round(parsed_args.rate * period))
    while True:
        end_ts = start_ts + period * 1e6
        ts = np.linspace(start_ts, end_ts, samples_per_period, endpoint=False)
        vals = np.linspace(0, 33, samples_per_period)
        start_ts = end_ts
        chunk = np.hstack((ts[:, None], vals[:, None]))
        await output.write(chunk)
        await asyncio.sleep(period)
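# Quick standalone check of the timestamping scheme above (values are illustrative):
# with endpoint=False each block stops one sample short of end_ts, so consecutive
# blocks butt together with a single uniform spacing of period / samples_per_period.
import numpy as np

rate, period = 5, 1
samples = int(np.round(rate * period))
start = 0.0
blocks = []
for _ in range(3):
    end = start + period * 1e6          # timestamps are in microseconds
    blocks.append(np.linspace(start, end, samples, endpoint=False))
    start = end
ts = np.concatenate(blocks)
print(np.unique(np.diff(ts)))           # [200000.] -- no gap at block boundaries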
async def run(self, parsed_args, output):
    while True:
        req = requests.get("http://api.openweathermap.org/data/2.5/weather?"
                           "id=%s&units=imperial&APPID=%s"
                           % (parsed_args.city_id, parsed_args.api_key))
        resp = req.json()
        temp = resp['main']['temp']
        pressure = resp['main']['pressure'] * 0.02953  # convert to inHg
        humidity = resp['main']['humidity']
        windspeed = resp['wind']['speed']
        winddir = resp['wind']['deg']
        ts = time_now()
        await output.write(
            np.array([[ts, temp, humidity, pressure, windspeed, winddir]]))
        await asyncio.sleep(60)  # poll every minute
async def run(self, parsed_args, output):
    start_ts = time_now()
    period = 1
    samples_per_period = int(np.round(parsed_args.rate * period))
    while True:
        try:
            end_ts = start_ts + period * 1e6
            ts = np.linspace(start_ts, end_ts, samples_per_period, endpoint=False)
            vals = np.linspace(0, 33, samples_per_period)
            start_ts = end_ts
            chunk = np.hstack((ts[:, None], vals[:, None]))
            # simulate an error
            if np.random.rand() < ERROR_PROBABILITY:
                raise ValueError
            await output.write(chunk)
        except ValueError:
            logging.error("simulated data interruption")
            await output.close_interval()
        await asyncio.sleep(period)
async def _get_power_data(self):
    one_day = 24 * 60 * 60 * 1e6  # microseconds
    one_week = 7 * one_day
    time_window = one_day
    pipe = await self.node.data_read(self.power_stream,
                                     start=time_now() - time_window,
                                     max_rows=1000)
    power_data = []
    offset = _utc_offset().total_seconds() * 1e3  # local-time offset in ms
    while True:
        try:
            data = await pipe.read(flatten=True)
        except EmptyPipe:
            break
        pipe.consume(len(data))
        # convert timestamps from us to local-time ms and combine the two power columns
        power_data += [[(d[0] / 1e3) + offset, (d[1] + d[8]) / 1e3]
                       for d in data]
        if pipe.end_of_interval:
            # a None entry marks a gap in the data
            power_data.append(None)
    await pipe.close()
    return power_data
async def run(self, parsed_args, output):
    daq_device = None
    ctr_device = None
    descriptor_index = 0
    interface_type = InterfaceType.USB
    low_encoder = 0
    encoder_count = 5
    sample_rate = 1000.0  # Hz
    samples_per_channel = 10000
    scan_options = ScanOption.CONTINUOUS
    scan_flags = CInScanFlag.DEFAULT
    encoder_type = CounterMeasurementType.ENCODER
    encoder_mode = CounterMeasurementMode.ENCODER_X4
    edge_detection = CounterEdgeDetection.RISING_EDGE
    tick_size = CounterTickSize.TICK_20ns
    debounce_mode = CounterDebounceMode.TRIGGER_AFTER_STABLE
    debounce_time = CounterDebounceTime.DEBOUNCE_7500ns
    config_flags = CConfigScanFlag.DEFAULT
    try:
        # Get descriptors for all of the available DAQ devices.
        devices = get_daq_device_inventory(interface_type)
        number_of_devices = len(devices)
        if number_of_devices == 0:
            raise ValueError('Error: No DAQ devices found')

        # Create the DAQ device object associated with the specified descriptor index.
        daq_device = DaqDevice(devices[descriptor_index])

        # Get the CtrDevice object and verify that it is valid.
        ctr_device = daq_device.get_ctr_device()
        if ctr_device is None:
            raise ValueError('Error: The DAQ device does not support counters')

        # Verify that the specified device supports hardware pacing for counters.
        ctr_info = ctr_device.get_info()
        if not ctr_info.has_pacer():
            raise ValueError('Error: The specified DAQ device does not support '
                             'hardware paced counter input')

        # Establish a connection to the DAQ device.
        descriptor = daq_device.get_descriptor()
        daq_device.connect()

        # Get the encoder counter channels.
        encoder_counters = get_supported_encoder_counters(ctr_info)
        if len(encoder_counters) == 0:
            raise ValueError('Error: The specified DAQ device does not support '
                             'encoder channels')

        # Verify that the low_encoder number is valid.
        first_encoder = encoder_counters[0]
        if low_encoder < first_encoder:
            low_encoder = first_encoder
        if low_encoder > first_encoder + len(encoder_counters) - 1:
            low_encoder = first_encoder

        # Verify that the encoder count is valid.
        if encoder_count > len(encoder_counters):
            encoder_count = len(encoder_counters)

        # Set the high_encoder channel.
        high_encoder = low_encoder + encoder_count - 1
        if high_encoder > first_encoder + len(encoder_counters) - 1:
            high_encoder = first_encoder + len(encoder_counters) - 1

        # Update the actual number of encoders being used.
        encoder_count = high_encoder - low_encoder + 1

        # Clear the counters and configure each one as an encoder.
        for encoder in range(low_encoder, high_encoder + 1):
            ctr_device.c_config_scan(encoder, encoder_type, encoder_mode,
                                     edge_detection, tick_size, debounce_mode,
                                     debounce_time, config_flags)

        # Allocate a buffer to receive the data.
        data = create_int_buffer(encoder_count, samples_per_channel)

        # Start the scan.
        ctr_device.c_in_scan(low_encoder, high_encoder, samples_per_channel,
                             sample_rate, scan_options, scan_flags, data)

        prev_index = 0
        startX = 0.0
        startY = 0.0
        startZ = 0.0
        startE = 0.0
        start_time = time_now()
        cur_time = start_time + 1
        while not self.stop_requested:
            # get_scan_status is a synchronous uldaq call
            status, transfer_status = ctr_device.get_scan_status()
            index = transfer_status.current_index
            if index == -1:
                # edge starting condition: no samples transferred yet
                AllVect = [0.0, 0.0, 0.0, 0.0, 0.0]
            elif prev_index < index:
                # normal condition: the buffer has not wrapped around
                AllVect = data[prev_index:index]
            else:
                # the buffer has wrapped around
                AllVect = data[prev_index:] + data[0:index]
            prev_index = index

            # de-interleave the five counter channels
            XVect = AllVect[3::5]
            YVect = AllVect[1::5]
            Z1Vect = AllVect[0::5]
            Z2Vect = AllVect[2::5]
            EVect = AllVect[4::5]

            end_time = cur_time + (len(XVect) - 1) * 1e3  # cur_time is in us
            [XVect, startX] = processX(XVect, startX)
            [YVect, startY] = processY(YVect, startY)
            [EVect, startE] = processE(EVect, startE)
            [ZVect, startZ] = processZ(Z1Vect, Z2Vect, startZ, cur_time, end_time)

            Xarray = np.vstack(np.array(XVect))
            Yarray = np.vstack(np.array(YVect))
            Zarray = np.vstack(np.array(ZVect))
            Earray = np.vstack(np.array(EVect) * -1)  # invert for positive extrusion
            time_array = np.vstack(np.linspace(cur_time, end_time, len(XVect)))

            All_Output = np.hstack((time_array, Xarray, Yarray, Zarray, Earray))
            await output.write(np.array(All_Output))
            await asyncio.sleep(1)
            cur_time = end_time + 1e3
            if status != ScanStatus.RUNNING:
                break
    except ValueError as e:
        print(str(e))
    except Exception:
        raise
    finally:
        if daq_device:
            if ctr_device:
                ctr_device.scan_stop()
            if daq_device.is_connected():
                daq_device.disconnect()
            daq_device.release()
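# Standalone sketch of the circular-buffer read used in the scan loop above: uldaq
# fills `data` as a ring buffer, so the samples acquired since the last read are
# either one slice or two slices joined across the wrap point. Purely illustrative,
# with a plain list standing in for the uldaq buffer.
def new_samples(buf, prev_index, current_index):
    if current_index == -1:              # scan has not produced any data yet
        return []
    if prev_index < current_index:       # no wrap since the last read
        return buf[prev_index:current_index]
    return buf[prev_index:] + buf[:current_index]   # wrapped: stitch the two pieces

buf = list(range(10))
print(new_samples(buf, 7, 3))  # [7, 8, 9, 0, 1, 2] -- read across the wrap point
print(new_samples(buf, 2, 6))  # [2, 3, 4, 5]       -- contiguous read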
async def run(self, parsed_args, output):
    while True:
        value = np.random.rand()  # data from sensor
        await output.write(np.array([[time_now(), value]]))
        await asyncio.sleep(1)