def store(self):
    self.restoreInfo()
    vmeAddress = 0
    # Stop device
    status = CAENDT5720.caenLib.CAENVME_WriteCycle(self.handle, c_int(vmeAddress + 0x8100), byref(c_int(0)), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
    if status != 0:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error stopping device')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    # need to wait a while
    sleep(0.1)
    acqMode = self.acq_mode.data()
    if acqMode == 'CONTINUOUS' or acqMode == 'CONTINUOUS WITH COUNTER':
        self.stop_store(0)
        return
    try:
        clock = self.clock_source.evaluate()
        dt = clock.getDelta().data()
    except:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error evaluating clock source')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    try:
        trig = self.trig_source.data()
    except:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error evaluating trigger source')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    try:
        startIdx = self.start_idx.data()
        endIdx = self.end_idx.data()
    except:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error evaluating start or end idx')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    try:
        pts = self.pts.data()
    except:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error evaluating Post Trigger Samples')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    # Read number of acquired buffers
    actSegments = c_int(0)
    status = CAENDT5720.caenLib.CAENVME_ReadCycle(self.handle, c_int(vmeAddress + 0x812C), byref(actSegments), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
    if status != 0:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error reading number of acquired segments')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    if actSegments.value == 0:
        return
    # Compute segment size
    try:
        nSegments = self.num_segments.data()
        segmentSamples = 1048576 // nSegments
    except:
        Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error reading max number of segments')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    # Get active channels
    chanMask = c_int(0)
    status = CAENDT5720.caenLib.CAENVME_ReadCycle(self.handle, c_int(vmeAddress + 0x8120), byref(chanMask), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
    nActChans = 0
    chanMask = chanMask.value
    numChannels = self.num_channels.data()
    for chan in range(0, numChannels):
        if (chanMask & (1 << chan)) != 0:
            nActChans = nActChans + 1
    if nActChans == 0:
        print('No active channels')
        return
    # 16-byte event header plus 2 bytes per sample per active channel
    segmentSize = 16 + 2 * segmentSamples * nActChans

    class DT5720Data(Structure):
        _fields_ = [("eventSize", c_int), ("boardGroup", c_int), ("counter", c_int), ("time", c_int), ("data", c_short * (segmentSize // 2))]

    actSegments = actSegments.value
    currStartIdx = segmentSamples - pts + startIdx
    currEndIdx = segmentSamples - pts + endIdx
    currChanSamples = Int32(currEndIdx - currStartIdx + 0.5).data()
    triggers = []
    deltas = []
    channels = [None] * numChannels
    for chan in range(numChannels):
        channels[chan] = ndarray(currChanSamples * actSegments)
    for segmentIdx in range(0, actSegments):
        segment = DT5720Data()
        retLen = c_int(0)
        status = CAENDT5720.caenLib.CAENVME_FIFOBLTReadCycle(self.handle, c_int(vmeAddress), byref(segment), c_int(segmentSize), c_int(self.cvA32_S_DATA), c_int(self.cvD64), byref(retLen))
        if status != 0:
            Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error reading data segment')
            raise mdsExceptions.TclFAILED_ESSENTIAL
        actSize = 4 * (segment.eventSize & 0x0fffffff)
        if actSize != segmentSize:
            Data.execute('DevLogErr($1,$2)', self.getNid(), 'Acquired event size different from expected size')
            raise mdsExceptions.TclFAILED_ESSENTIAL
        counter = segment.time / 2
        triggers.append(counter * dt)
        deltas.append(dt)
        sizeInInts = (segment.eventSize & 0x0fffffff) - 4
        chanSizeInInts = sizeInInts // nActChans
        chanSizeInShorts = chanSizeInInts * 2
        for chan in range(numChannels):
            if (chanMask & (1 << chan)) != 0:
                channels[chan][segmentIdx * currChanSamples:segmentIdx * currChanSamples + currEndIdx - currStartIdx] = segment.data[chan * chanSizeInShorts + currStartIdx:chan * chanSizeInShorts + currEndIdx]
        # endfor chan in range(numChannels)
    # endfor segmentIdx in range(actSegments)
    if len(self.trig_source.getShape()) > 0:
        dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig[0]), Range(Float64Array(trig) + Float64(startIdx * dt), Float64Array(trig) + Float64(endIdx * dt), Float64Array(deltas)))
    else:
        dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig), Range(Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(startIdx * dt), Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(endIdx * dt), Float64Array(deltas)))
    dim.setUnits("s")
    for chan in range(numChannels):
        if (chanMask & (1 << chan)) != 0:
            try:
                offset = getattr(self, 'channel_%d_offset' % (chan + 1))
            except:
                Data.execute('DevLogErr($1,$2)', self.getNid(), 'Error reading channel offset')
                raise mdsExceptions.TclFAILED_ESSENTIAL
            raw = Int16Array(channels[chan])
            raw.setUnits("counts")
            data = Data.compile("2*($VALUE - 2048)/4096.+$1", offset)
            data.setUnits("Volts")
            signal = Signal(data, raw, dim)
            try:
                getattr(self, 'channel_%d_data' % (chan + 1)).putData(signal)
            except:
                Data.execute('DevLogErr($1,$2)', self.getNid(), 'Cannot write Signal in tree')
                raise mdsExceptions.TclFAILED_ESSENTIAL
    # endfor chan in range(numChannels)
    return
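# Illustrative sketch (not part of the device class above): the TDI expression
# "2*($VALUE - 2048)/4096.+$1" compiled in store() maps 12-bit ADC counts onto a
# 2 V span centred on a per-channel offset. A plain NumPy equivalent, assuming only
# that the raw samples are the 0..4095 counts stored in the raw Int16Array:
def counts_to_volts_sketch(counts, offset):
    import numpy as np
    counts = np.asarray(counts, dtype=np.float64)
    # full scale (4096 counts) spans 2 V, mid-scale (2048 counts) sits at the channel offset
    return 2.0 * (counts - 2048.0) / 4096.0 + offset

# e.g. counts_to_volts_sketch([0, 2048, 4095], offset=0.0) -> approximately [-1.0, 0.0, 1.0]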
def doFrame(self):
    def getStringExp(self, name, response_headers):
        if name in self.args:
            try:
                response_headers.append((name, str(Data.execute(self.args[name][-1]).data())))
            except Exception:
                response_headers.append((name, str(sys.exc_info())))

    response_headers = list()
    response_headers.append(('Cache-Control', 'no-store, no-cache, must-revalidate'))
    response_headers.append(('Pragma', 'no-cache'))
    response_headers.append(('Content-Type', 'application/octet-stream'))
    if 'tree' in self.args:
        Tree.usePrivateCtx()
        try:
            t = Tree(self.args['tree'][-1], int(self.args['shot'][-1].split(',')[0]))
        except:
            response_headers.append(('ERROR', 'Error opening tree'))
    for name in ('title', 'xlabel', 'ylabel'):
        getStringExp(self, name, response_headers)
    if 'frame_idx' in self.args:
        frame_idx = self.args['frame_idx'][-1]
    else:
        frame_idx = '0'
    expr = self.args['y'][-1]
    try:
        sig = Data.execute('GetSegment(' + expr + ',' + frame_idx + ')')
        frame_data = makeData(sig.data())
    except Exception:
        response_headers.append(('ERROR', 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
    if 'init' in self.args:
        if 'x' in self.args:
            expr = self.args['x'][-1]
            try:
                times = Data.execute(expr)
                times = makeData(times.data())
            except Exception:
                response_headers.append(('ERROR', 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
        else:
            try:
                # times = Data.execute('dim_of(' + expr + ')')
                times = list()
                numSegments = Data.execute('GetNumSegments(' + expr + ')').data()
                for i in range(0, numSegments):
                    times.append(Data.execute('GetSegmentLimits(' + expr + ',' + str(i) + ')').data()[0])
                times = Float64Array(times)
            except Exception:
                response_headers.append(('ERROR', 'Error getting x axis of: "%s", error: %s' % (expr, sys.exc_info())))
    response_headers.append(('FRAME_WIDTH', str(sig.getShape()[0])))
    response_headers.append(('FRAME_HEIGHT', str(sig.getShape()[1])))
    response_headers.append(('FRAME_BYTES_PER_PIXEL', str(frame_data.data().itemsize)))
    response_headers.append(('FRAME_LENGTH', str(len(frame_data))))
    output = str(frame_data.data().data)
    if 'init' in self.args:
        response_headers.append(('TIMES_DATATYPE', times.__class__.__name__))
        response_headers.append(('TIMES_LENGTH', str(len(times))))
        output = output + str(times.data().data)
    status = '200 OK'
    return (status, response_headers, output)
def doScopepanel(self):
    def getStringExp(self, name, response_headers):
        if name in self.args:
            try:
                response_headers.append((name, str(Data.execute(self.args[name][-1]).data())))
            except Exception:
                response_headers.append((name + '_error:', str(sys.exc_info())))

    def manageContinuousUpdate(self, x):
        if x.getShape().size == 1:
            new_max_x = x.data()[-1]
        else:
            new_max_x = x.data()[0][-1]
        curr_max_x = float(self.args['curr_max_x' + x_idx_s][-1])
        displayed_width = float(self.args['displayed_width'][-1])
        default_num_samples = float(self.args['default_num_samples'][-1])
        num_samples = int((new_max_x - curr_max_x) * default_num_samples / displayed_width)
        if new_max_x > curr_max_x and num_samples > 1:
            if issubclass(x.dtype.type, integer):
                min_req_x = floor(curr_max_x + 1)
            else:
                min_req_x = curr_max_x + finfo(new_max_x).eps
            update_function = 'MdsMisc->GetXYWave:DSC'
            infinity = sys.float_info.max
            if x.__class__.__name__ == 'Uint64Array' or x.__class__.__name__ == 'Int64Array':
                update_function = 'MdsMisc->GetXYWaveLongTimes:DSC'
                min_req_x = str(int(min_req_x)) + 'QU'
                infinity = str((1 << 63) - 1) + 'QU'
            sig = Data.execute(update_function + '(\'' + expr.replace('\\', '\\\\').replace('\'', '\\\'') + '\',' + str(min_req_x) + ',' + str(infinity) + ',' + str(num_samples) + ')')
            return makeData(sig.dim_of().data()), makeData(sig.data())
        else:
            return Float64Array([]), Float64Array([])

    response_headers = list()
    response_headers.append(('Cache-Control', 'no-store, no-cache, must-revalidate'))
    response_headers.append(('Pragma', 'no-cache'))
    if 'tree' in self.args:
        Tree.usePrivateCtx()
        try:
            t = Tree(self.args['tree'][-1], int(self.args['shot'][-1].split(',')[0]))
        except:
            pass
    for name in ('title', 'xlabel', 'ylabel', 'xmin', 'xmax', 'ymin', 'ymax'):
        getStringExp(self, name, response_headers)
    continuous_update = 'continuous_update' in self.args
    sig_idx = 0
    output = ''
    if 'tree' in self.args:
        shots = self.args['shot'][-1].split(',')
        for shot in shots:
            y_idx = 1
            y_idx_s = '%d' % (y_idx,)
            while 'y' + y_idx_s in self.args:
                x_idx_s = y_idx_s
                sig_idx = sig_idx + 1
                sig_idx_s = '%d' % (sig_idx,)
                expr = self.args['y' + y_idx_s][-1]
                y_idx = y_idx + 1
                y_idx_s = '%d' % (y_idx,)
                try:
                    t = Tree(self.args['tree'][-1], int(shot))
                    response_headers.append(('SHOT' + sig_idx_s, str(t.shot)))
                except Exception:
                    response_headers.append(('ERROR' + sig_idx_s, 'Error opening tree %s, shot %s, error: %s' % (self.args['tree'][-1], shot, sys.exc_info())))
                    continue
                if 'default_node' in self.args:
                    try:
                        t.setDefault(t.getNode(self.args['default_node'][-1]))
                    except Exception:
                        response_headers.append(('ERROR' + sig_idx_s, 'Error setting default to %s in tree %s, shot %s, error: %s' % (self.args['default_node'][-1], self.args['tree'][-1], shot, sys.exc_info())))
                        continue
                try:
                    sig = Data.execute(expr)
                    if not continuous_update:
                        y = makeData(sig.data())
                except Exception:
                    response_headers.append(('ERROR' + sig_idx_s, 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
                    continue
                if 'x' + x_idx_s in self.args:
                    expr = self.args['x' + x_idx_s][-1]
                    try:
                        x = Data.execute(expr)
                        x = makeData(x.data())
                        if continuous_update:
                            x, y = manageContinuousUpdate(self, x)
                    except Exception:
                        response_headers.append(('ERROR' + sig_idx_s, 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
                        continue
                else:
                    try:
                        x = makeData(sig.dim_of().data())
                        if continuous_update:
                            x, y = manageContinuousUpdate(self, x)
                    except Exception:
                        response_headers.append(('ERROR' + sig_idx_s, 'Error getting x axis of "%s", error: %s' % (expr, sys.exc_info())))
                        continue
                if x.__class__.__name__ == 'Uint64Array' or x.__class__.__name__ == 'Int64Array':
                    x = Float64Array(x)  # Javascript doesn't support 64 bit integers
                    response_headers.append(('X_IS_DATETIME', 'true'))
                response_headers.append(('X' + sig_idx_s + '_DATATYPE', x.__class__.__name__))
                response_headers.append(('Y' + sig_idx_s + '_DATATYPE', y.__class__.__name__))
                response_headers.append(('X' + sig_idx_s + '_LENGTH', str(len(x))))
                response_headers.append(('Y' + sig_idx_s + '_LENGTH', str(len(y))))
                output = output + str(x.data().data) + str(y.data().data)
    else:
        y_idx = 1
        y_idx_s = '%d' % (y_idx,)
        while 'y' + y_idx_s in self.args:
            x_idx_s = y_idx_s
            expr = self.args['y' + y_idx_s][-1]
            y_idx = y_idx + 1
            y_idx_s = '%d' % (y_idx,)
            sig_idx = sig_idx + 1
            sig_idx_s = '%d' % (sig_idx,)
            try:
                sig = Data.execute(expr)
                if not continuous_update:
                    y = makeData(sig.data())
            except Exception:
                response_headers.append(('ERROR' + sig_idx_s, 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
                continue
            if 'x' + x_idx_s in self.args:
                expr = self.args['x' + x_idx_s][-1]
                try:
                    x = Data.execute(expr)
                    x = makeData(x.data())
                    if continuous_update:
                        x, y = manageContinuousUpdate(self, x)
                except Exception:
                    response_headers.append(('ERROR' + sig_idx_s, 'Error evaluating expression: "%s", error: %s' % (expr, sys.exc_info())))
                    continue
            else:
                try:
                    x = makeData(sig.dim_of().data())
                    if continuous_update:
                        x, y = manageContinuousUpdate(self, x)
                except Exception:
                    response_headers.append(('ERROR' + sig_idx_s, 'Error getting x axis of "%s", error: %s' % (expr, sys.exc_info())))
                    continue
            if x.__class__.__name__ == 'Uint64Array' or x.__class__.__name__ == 'Int64Array':
                x = Float64Array(x)  # Javascript doesn't support 64 bit integers
                response_headers.append(('X_IS_DATETIME', 'true'))
            response_headers.append(('X' + sig_idx_s + '_DATATYPE', x.__class__.__name__))
            response_headers.append(('Y' + sig_idx_s + '_DATATYPE', y.__class__.__name__))
            response_headers.append(('X' + sig_idx_s + '_LENGTH', str(len(x))))
            response_headers.append(('Y' + sig_idx_s + '_LENGTH', str(len(y))))
            output = output + str(x.data().data) + str(y.data().data)
    return ('200 OK', response_headers, output)
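# Illustrative client-side sketch (not part of the handler above): doScopepanel returns the
# X/Y arrays back to back as raw bytes and describes them through the X<n>_DATATYPE / Y<n>_DATATYPE
# and X<n>_LENGTH / Y<n>_LENGTH response headers. A minimal Python reader, assuming native byte
# order and the MDSplus array class names listed in the dtype map below (the map itself is an
# assumption to extend as needed, and the function name is made up for the example):
def parse_scopepanel_payload_sketch(headers, payload):
    import numpy as np
    dtype_map = {'Float32Array': np.float32, 'Float64Array': np.float64,
                 'Int16Array': np.int16, 'Int32Array': np.int32,
                 'Uint16Array': np.uint16, 'Uint32Array': np.uint32}
    hdr = dict(headers)
    signals = []
    pos = 0
    idx = 1
    # signals are concatenated in index order; stop at the first index with no headers
    while ('X%d_DATATYPE' % idx) in hdr:
        x_dtype = dtype_map[hdr['X%d_DATATYPE' % idx]]
        y_dtype = dtype_map[hdr['Y%d_DATATYPE' % idx]]
        x_len = int(hdr['X%d_LENGTH' % idx])
        y_len = int(hdr['Y%d_LENGTH' % idx])
        x = np.frombuffer(payload, dtype=x_dtype, count=x_len, offset=pos)
        pos += x.nbytes
        y = np.frombuffer(payload, dtype=y_dtype, count=y_len, offset=pos)
        pos += y.nbytes
        signals.append((x, y))
        idx += 1
    return signals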
def store(self):
    if CAENV1740.caenLib is None:
        CAENV1740.caenLib = CDLL("libCAENVME.so")
    handle = c_long(0)
    status = CAENV1740.caenLib.CAENVME_Init(c_int(self.cvV2718), c_int(0), c_int(0), byref(handle))
    if status != 0:
        print('Error initializing CAENVME')
        raise mdsExceptions.TclFAILED_ESSENTIAL
    try:  # paired with the generic error handler at the end of the method
        boardId = self.board_id.data()
        if Device.debug:
            print('BOARD ID: ', boardId)
        vmeAddress = self.vme_address.data()
        if Device.debug:
            print('VME ADDRESS: ', vmeAddress)
        try:
            clock = self.clock_source.evaluate()
            dt = clock.getDelta().data()
        except:
            print('Error evaluating clock source')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        try:
            trig = self.trig_source.data()
        except:
            print('Error evaluating trigger source')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        try:
            startIdx = self.start_idx.data()
            endIdx = self.end_idx.data()
        except:
            print('Error evaluating start or end idx')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        try:
            pts = self.pts.data()
        except:
            print('Error evaluating Post Trigger Samples')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        # Stop device
        status = CAENV1740.caenLib.CAENVME_WriteCycle(handle, c_int(vmeAddress + 0x8100), byref(c_int(0)), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        if status != 0:
            print('Error stopping device')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        # need to wait a while
        sleep(0.1)
        # Read number of acquired buffers
        actSegments = c_int(0)
        status = CAENV1740.caenLib.CAENVME_ReadCycle(handle, c_int(vmeAddress + 0x812C), byref(actSegments), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        if status != 0:
            print('Error reading number of acquired segments')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        if Device.debug:
            print('Acquired segments: ', actSegments.value)
        if actSegments.value == 0:
            CAENV1740.caenLib.CAENVME_End(handle)
            return
        # Compute segment size
        try:
            nSegments = self.num_segments.data()
            segmentSamples = 196608 // nSegments
            print('Segment samples: ', segmentSamples)
        except:
            print('Error reading max number of segments')
            CAENV1740.caenLib.CAENVME_End(handle)
            raise mdsExceptions.TclFAILED_ESSENTIAL
        # Get active groups
        groupMask = c_int(0)
        status = CAENV1740.caenLib.CAENVME_ReadCycle(handle, c_int(vmeAddress + 0x8120), byref(groupMask), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        nActGroups = 0
        groupMask = groupMask.value
        for group in range(0, 8):
            if (groupMask & (1 << group)) != 0:
                nActGroups = nActGroups + 1
        if nActGroups == 0:
            if Device.debug:
                print('No active groups')
            CAENV1740.caenLib.CAENVME_End(handle)
            return
        # 16-byte event header plus 12-bit samples for the 8 channels of each active group
        segmentSize = 16 + segmentSamples * nActGroups * 8 * 12 // 8

        class V1740Data(Structure):
            _fields_ = [("eventSize", c_int), ("boardGroup", c_int), ("counter", c_int), ("time", c_int), ("data", c_int * (segmentSamples * 64 * 12 // (8 * 4)))]

        actSegments = actSegments.value
        currStartIdx = segmentSamples - pts + startIdx
        currEndIdx = segmentSamples - pts + endIdx
        DataArray = c_short * ((currEndIdx - currStartIdx + 1) * actSegments)
        triggers = []
        deltas = []
        channels = []
        for chan in range(0, 64):
            channels.append([])
        for chan in range(0, 64):
            channels[chan] = DataArray()
        c = []
        for i in range(0, 64):
            c.append(0)
        for sample in range(0, actSegments):
            segment = V1740Data()
            retLen = c_int(0)
            status = CAENV1740.caenLib.CAENVME_FIFOBLTReadCycle(handle, c_int(vmeAddress), byref(segment), c_int(segmentSize), c_int(self.cvA32_S_DATA), c_int(self.cvD64), byref(retLen))
            if status != 0:
                print('Error reading data segment')
                CAENV1740.caenLib.CAENVME_End(handle)
                raise mdsExceptions.TclFAILED_ESSENTIAL
            actSize = 4 * (segment.eventSize & 0x0fffffff)
            if actSize != segmentSize:
                print('Acquired event size different from expected size')
                CAENV1740.caenLib.CAENVME_End(handle)
                raise mdsExceptions.TclFAILED_ESSENTIAL
            counter = segment.time / 2
            triggers.append(counter * dt)
            deltas.append(dt)
            sizeInInts = (segment.eventSize & 0x0fffffff) - 4
            groupSize = sizeInInts // nActGroups
            groupOffset = 0
            for group in range(0, 8):
                if (groupMask & (1 << group)) != 0:
                    # Every 9 32-bit words hold 24 consecutive 12-bit samples:
                    # 3 samples for channel 0 of the group, then 3 for channel 1, ..., up to channel 7.
                    for blockIdx in range(groupSize // 9):
                        base = groupOffset + blockIdx * 9
                        sampleCount = blockIdx * 3
                        block = 0
                        for w in range(9):
                            block |= (segment.data[base + w] & 0xFFFFFFFF) << (32 * w)
                        for k in range(24):
                            sampleIdx = sampleCount + k % 3
                            if sampleIdx >= currStartIdx and sampleIdx <= currEndIdx:
                                chan = group * 8 + k // 3
                                channels[chan][c[chan]] = (block >> (12 * k)) & 0x0FFF
                                c[chan] = c[chan] + 1
                    groupOffset = groupOffset + groupSize
            # endfor group in range(0, 8)
        # endfor sample in range(0, actSegments)
        if len(self.trig_source.getShape()) > 0:
            dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig[0]), Range(Float64Array(trig) + Float64(startIdx * dt), Float64Array(trig) + Float64(endIdx * dt), Float64Array(deltas)))
        else:
            dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig), Range(Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(startIdx * dt), Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(endIdx * dt), Float64Array(deltas)))
        print('DIM: ', dim)
        dim.setUnits("s")
        for group in range(0, 8):
            if groupMask & (1 << group):
                try:
                    offset = self.__dict__['group_%d_offset' % (group + 1)].data()
                except:
                    print('Error evaluating group offset')
                    CAENV1740.caenLib.CAENVME_End(handle)
                    raise mdsExceptions.TclFAILED_ESSENTIAL
                for chan in range(0, 8):
                    raw = Int16Array(channels[group * 8 + chan])
                    raw.setUnits("counts")
                    data = Data.compile("2*($VALUE - 2048)/4096.+$1", offset)
                    data.setUnits("Volts")
                    signal = Signal(data, raw, dim)
                    try:
                        self.__dict__['data%02d' % (group * 8 + chan + 1,)].putData(signal)
                    except:
                        print('Cannot write Signal in the tree')
                        CAENV1740.caenLib.CAENVME_End(handle)
                        raise mdsExceptions.TclFAILED_ESSENTIAL
        # endfor group in range(0, 8)
        CAENV1740.caenLib.CAENVME_End(handle)
        return
    except:
        print('Generic Error')
        CAENV1740.caenLib.CAENVME_End(handle)
        raise mdsExceptions.TclFAILED_ESSENTIAL
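# Illustrative sketch of the V1740 sample packing handled by the decode loop above: every
# 9 32-bit words carry 24 consecutive 12-bit samples (3 samples for each of the 8 channels of a
# group, channel 0 first). The helpers below pack and unpack one such block and only document the
# layout; the function names are made up for the example.
def pack_v1740_block_sketch(samples):
    # samples: 24 values in 0..4095, ordered ch0 s0..s2, ch1 s0..s2, ..., ch7 s0..s2
    block = 0
    for k, s in enumerate(samples):
        block |= (s & 0x0FFF) << (12 * k)
    return [(block >> (32 * w)) & 0xFFFFFFFF for w in range(9)]

def unpack_v1740_block_sketch(words):
    block = 0
    for w, word in enumerate(words):
        block |= (word & 0xFFFFFFFF) << (32 * w)
    return [(block >> (12 * k)) & 0x0FFF for k in range(24)]

# Round trip: unpack_v1740_block_sketch(pack_v1740_block_sketch(list(range(24)))) == list(range(24))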
def store(self, arg):
    # Legacy (Python 2) version of the CAEN V1740 store method, using numeric node offsets.
    from MDSplus import Tree, TreeNode, Int16Array, Float64Array, Int32, Int64, Float32, Float64, Signal, Data, Dimension, Window, Range
    from ctypes import CDLL, c_int, c_short, c_long, byref, Structure
    import time
    caenLib = CDLL("libCAENVME.so")
    handle = c_long(0)
    status = caenLib.CAENVME_Init(c_int(self.cvV2718), c_int(0), c_int(0), byref(handle))
    if status != 0:
        print 'Error initializing CAENVME'
        return 0
    try:
        baseNid = self.node.getNid()
        boardId = TreeNode(baseNid + self.N_BOARD_ID).data()
        print 'BOARD ID: ', boardId
        vmeAddress = TreeNode(baseNid + self.N_VME_ADDRESS).data()
        print 'VME ADDRESS: ', vmeAddress
        try:
            clock = TreeNode(baseNid + self.N_CLOCK_SOURCE).evaluate()
            dt = clock.getDelta().data()
        except:
            print 'Error evaluating clock source'
            caenLib.CAENVME_End(handle)
            return 0
        try:
            trig = TreeNode(baseNid + self.N_TRIG_SOURCE).data()
        except:
            print 'Error evaluating trigger source'
            caenLib.CAENVME_End(handle)
            return 0
        try:
            startIdx = TreeNode(baseNid + self.N_START_IDX).data()
            endIdx = TreeNode(baseNid + self.N_END_IDX).data()
        except:
            print 'Error evaluating start or end idx'
            caenLib.CAENVME_End(handle)
            return 0
        try:
            pts = TreeNode(baseNid + self.N_PTS).data()
        except:
            print 'Error evaluating Post Trigger Samples'
            caenLib.CAENVME_End(handle)
            return 0
        # Stop device
        status = caenLib.CAENVME_WriteCycle(handle, c_int(vmeAddress + 0x8100), byref(c_int(0L)), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        if status != 0:
            print 'Error stopping device'
            caenLib.CAENVME_End(handle)
            return 0
        # need to wait a while
        time.sleep(0.1)
        # Read number of acquired buffers
        actSegments = c_int(0)
        status = caenLib.CAENVME_ReadCycle(handle, c_int(vmeAddress + 0x812C), byref(actSegments), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        if status != 0:
            print 'Error reading number of acquired segments'
            caenLib.CAENVME_End(handle)
            return 0
        print 'Acquired segments: ', actSegments.value
        if actSegments.value == 0:
            caenLib.CAENVME_End(handle)
            return 1
        # Compute segment size
        try:
            nSegments = TreeNode(baseNid + self.N_NUM_SEGMENTS).data()
            segmentSamples = 196608 / nSegments
            print 'Segment samples: ', segmentSamples
        except:
            print 'Error reading max number of segments'
            caenLib.CAENVME_End(handle)
            return 0
        # Get active groups
        groupMask = c_int(0)
        status = caenLib.CAENVME_ReadCycle(handle, c_int(vmeAddress + 0x8120), byref(groupMask), c_int(self.cvA32_S_DATA), c_int(self.cvD32))
        nActGroups = 0
        groupMask = groupMask.value
        for group in range(0, 8):
            if (groupMask & (1 << group)) != 0:
                nActGroups = nActGroups + 1
        if nActGroups == 0:
            print 'No active groups'
            caenLib.CAENVME_End(handle)
            return 1
        segmentSize = 16 + segmentSamples * nActGroups * 8 * 12 / 8

        class V1740Data(Structure):
            _fields_ = [("eventSize", c_int), ("boardGroup", c_int), ("counter", c_int), ("time", c_int), ("data", c_int * (segmentSamples * 64 * 12 / (8 * 4)))]

        actSegments = actSegments.value
        currStartIdx = segmentSamples - pts + startIdx
        currEndIdx = segmentSamples - pts + endIdx
        DataArray = c_short * ((currEndIdx - currStartIdx + 1) * actSegments)
        triggers = []
        deltas = []
        channels = []
        for chan in range(0, 64):
            channels.append([])
        for chan in range(0, 64):
            channels[chan] = DataArray()
        c = []
        for i in range(0, 64):
            c.append(0)
        for sample in range(0, actSegments):
            segment = V1740Data()
            retLen = c_int(0)
            status = caenLib.CAENVME_FIFOBLTReadCycle(handle, c_int(vmeAddress), byref(segment), c_int(segmentSize), c_int(self.cvA32_S_DATA), c_int(self.cvD64), byref(retLen))
            if status != 0:
                print 'Error reading data segment'
                caenLib.CAENVME_End(handle)
                return 0
            actSize = 4 * (segment.eventSize & 0x0fffffff)
            if actSize != segmentSize:
                print 'Acquired event size different from expected size'
                caenLib.CAENVME_End(handle)
                return 0
            counter = segment.time / 2
            triggers.append(counter * dt)
            deltas.append(dt)
            sizeInInts = (segment.eventSize & 0x0fffffff) - 4
            groupSize = sizeInInts / nActGroups
            groupOffset = 0
            for group in range(0, 8):
                if (groupMask & (1 << group)) != 0:
                    # Every 9 32-bit words hold 24 consecutive 12-bit samples:
                    # 3 samples for channel 0 of the group, then 3 for channel 1, ..., up to channel 7.
                    for blockIdx in range(groupSize / 9):
                        base = groupOffset + blockIdx * 9
                        sampleCount = blockIdx * 3
                        block = 0
                        for w in range(9):
                            block |= (segment.data[base + w] & 0xFFFFFFFF) << (32 * w)
                        for k in range(24):
                            sampleIdx = sampleCount + k % 3
                            if sampleIdx >= currStartIdx and sampleIdx <= currEndIdx:
                                chan = group * 8 + k / 3
                                channels[chan][c[chan]] = (block >> (12 * k)) & 0x0FFF
                                c[chan] = c[chan] + 1
                    groupOffset = groupOffset + groupSize
            # endfor group in range(0, 8)
        # endfor sample in range(0, actSegments)
        if len(TreeNode(baseNid + self.N_TRIG_SOURCE).getShape()) > 0:
            dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig[0]), Range(Float64Array(trig) + Float64(startIdx * dt), Float64Array(trig) + Float64(endIdx * dt), Float64Array(deltas)))
        else:
            dim = Dimension(Window(startIdx, endIdx + (actSegments - 1) * (endIdx - startIdx), trig), Range(Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(startIdx * dt), Float64Array(triggers) - Float64(triggers[0]) + Float64(trig) + Float64(endIdx * dt), Float64Array(deltas)))
        print 'DIM: ', dim
        dim.setUnits("s")
        chan0Nid = baseNid + self.N_CHANNEL_0
        data0Nid = baseNid + self.N_DATA_0
        for group in range(0, 8):
            if groupMask & (1 << group):
                try:
                    offset = TreeNode(baseNid + self.N_CHANNEL_0 + group * self.N_CHAN_DATA + self.N_CHAN_OFFSET).data()
                except:
                    print 'Error evaluating group offset'
                    caenLib.CAENVME_End(handle)
                    return 0
                for chan in range(0, 8):
                    raw = Int16Array(channels[group * 8 + chan])
                    raw.setUnits("counts")
                    data = Data.compile("2*($VALUE - 2048)/4096.+$1", offset)
                    data.setUnits("Volts")
                    signal = Signal(data, raw, dim)
                    try:
                        TreeNode(baseNid + self.N_DATA_0 + group * 8 + chan).putData(signal)
                    except:
                        print 'Cannot write Signal in the tree'
                        caenLib.CAENVME_End(handle)
                        return 0
        # endfor group in range(0, 8)
        caenLib.CAENVME_End(handle)
        return 1
    except:
        print 'Generic Error'
        caenLib.CAENVME_End(handle)
        return 0
def doFrame(self):
    def getStringExp(self, name, response_headers, _tdi):
        if name in self.args:
            try:
                response_headers.append((name, str(_tdi(self.args[name][-1]).data())))
            except Exception as e:
                response_headers.append((name, "ERROR: %s" % (e,)))

    response_headers = list()
    response_headers.append(('Cache-Control', 'no-store, no-cache, must-revalidate'))
    response_headers.append(('Pragma', 'no-cache'))
    response_headers.append(('Content-Type', 'application/octet-stream'))
    if 'tree' in self.args:
        tree = self.openTree(self.args['tree'][-1], self.args['shot'][-1].split(',')[0])
        _tdi = tree.tdiExecute
    else:
        tree = None
        _tdi = tdi
    for name in ('title', 'xlabel', 'ylabel'):
        getStringExp(self, name, response_headers, _tdi)
    if 'frame_idx' in self.args:
        frame_idx = self.args['frame_idx'][-1]
    else:
        frame_idx = '0'
    expr = self.args['y'][-1]
    sig = _tdi('GetSegment(' + expr + ',' + frame_idx + ')')
    frame_data = DATA(sig).evaluate()
    response_headers.append(('FRAME_WIDTH', str(sig.getShape()[0])))
    response_headers.append(('FRAME_HEIGHT', str(sig.getShape()[1])))
    response_headers.append(('FRAME_BYTES_PER_PIXEL', str(frame_data.data().itemsize)))
    response_headers.append(('FRAME_LENGTH', str(len(frame_data))))
    output = str(frame_data.data().data)
    if 'init' in self.args:
        if 'x' in self.args:
            expr = self.args['x'][-1]
            times = DATA(_tdi(expr)).evaluate()
        else:
            times = list()
            numSegments = _tdi('GetNumSegments(' + expr + ')').data()
            for i in range(0, numSegments):
                times.append(_tdi('GetSegmentLimits(' + expr + ',' + str(i) + ')').data()[0])
            times = Float64Array(times)
        response_headers.append(('TIMES_DATATYPE', times.__class__.__name__))
        response_headers.append(('TIMES_LENGTH', str(len(times))))
        output = output + str(times.data().data)
    status = '200 OK'
    return (status, response_headers, output)
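# Illustrative client-side sketch (not part of the handler above): doFrame describes the binary
# frame through the FRAME_WIDTH, FRAME_HEIGHT, FRAME_BYTES_PER_PIXEL and FRAME_LENGTH headers and
# appends the raw pixel bytes to the output. A minimal reader, assuming native byte order, unsigned
# integer pixels, and an array shape that follows the header order; the function name is made up
# for the example:
def parse_frame_payload_sketch(headers, payload):
    import numpy as np
    hdr = dict(headers)
    width = int(hdr['FRAME_WIDTH'])
    height = int(hdr['FRAME_HEIGHT'])
    bytes_per_pixel = int(hdr['FRAME_BYTES_PER_PIXEL'])
    dtype = {1: np.uint8, 2: np.uint16, 4: np.uint32}[bytes_per_pixel]
    frame = np.frombuffer(payload, dtype=dtype, count=width * height)
    # shape follows the FRAME_WIDTH / FRAME_HEIGHT header order
    return frame.reshape((width, height))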