def start_live_video(self, framerate=None, **kwds):
    """Configure the camera and start live (free-run) video capture.

    Parameters
    ----------
    framerate : pint.Quantity, optional
        Desired framerate (convertible to Hz). If None, the driver constant
        ``IS_GET_FRAMERATE`` is passed instead. The framerate actually set by
        the driver is stored in the ``framerate`` attribute.
    **kwds
        Capture settings (``vbin``, ``hbin``, ``left``, ``top``, ``right``,
        ``bot``, ``exposure_time``); defaults are filled in by
        ``_handle_kwds``.
    """
    self._handle_kwds(kwds)

    self._set_binning(kwds['vbin'], kwds['hbin'])
    self._set_AOI(kwds['left'], kwds['top'], kwds['right'], kwds['bot'])
    self._set_exposure(kwds['exposure_time'])

    # Rebuild the two-buffer ring used for live acquisition.
    self._free_image_mem_seq()
    self._allocate_mem_seq(num_bufs=2)
    self._set_queueing(False)

    if framerate is None:
        framerate = IS_GET_FRAMERATE
    else:
        framerate = framerate.m_as('Hz')

    newFPS = DOUBLE()
    ret = lib.is_SetFrameRate(self._hcam, DOUBLE(framerate), pointer(newFPS))
    if ret != IS_SUCCESS:
        # FIX: log.warn is deprecated (Python logging); use log.warning
        log.warning("Failed to set framerate")
    else:
        self.framerate = newFPS.value

    lib.is_SetExternalTrigger(self._hcam, IS_SET_TRIGGER_OFF)
    lib.is_EnableEvent(self._hcam, IS_SET_EVENT_FRAME)
    lib.is_CaptureVideo(self._hcam, IS_WAIT)
def start_live_video(self, framerate=None):
    """Start live video capture.

    Parameters
    ----------
    framerate : float, optional
        Desired framerate. The true framerate that results can be found in
        the ``framerate`` attribute.
    """
    self._install_event_handler()

    # IS_GET_FRAMERATE tells the driver to keep/report its current rate.
    target_fps = IS_GET_FRAMERATE if framerate is None else framerate
    actual_fps = DOUBLE()
    status = lib.is_SetFrameRate(self._hcam, DOUBLE(target_fps),
                                 pointer(actual_fps))
    if status != IS_SUCCESS:
        print("Error: failed to set framerate")
    else:
        self.framerate = actual_fps.value

    lib.is_CaptureVideo(self._hcam, IS_WAIT)
    self.is_live = True
def _get_exposure_inc(self):
    """Query the camera's exposure-time increment, returned as a ms quantity."""
    increment = DOUBLE()
    # The trailing 8 is the parameter size in bytes (presumably
    # sizeof(double), as the uc480 API expects) — TODO confirm against SDK.
    lib.is_Exposure(self._hcam, IS_EXPOSURE_CMD_GET_EXPOSURE_RANGE_INC,
                    byref(increment), 8)
    return Q_(increment.value, 'ms')
def _set_exposure(self, exp_time):
    """Set the exposure time.

    Parameters
    ----------
    exp_time : pint.Quantity
        Exposure time, convertible to milliseconds.

    Returns
    -------
    The ctypes DOUBLE passed to the driver (which may update it in place).
    """
    size_bytes = UINT(8)
    value = DOUBLE(exp_time.m_as('ms'))
    lib.is_Exposure(self._hcam, IS_EXPOSURE_CMD_SET_EXPOSURE,
                    byref(value), size_bytes)
    return value
def sumDOW(self, nextLayer):
    """Return the sum of (output weight * downstream gradient) over nextLayer.

    Parameters
    ----------
    nextLayer : mapping
        Iterating it yields keys usable to index both ``self.outputWeights``
        and ``nextLayer`` itself; each value has a ``gradient`` attribute.
    """
    # FIX: the accumulator was DOUBLE(0); ctypes.c_double does not support
    # '+=' with a float, so the original raised TypeError on first iteration.
    total = 0.0
    for item in nextLayer:
        total += self.outputWeights[item].weight * nextLayer[item].gradient
    return total
class Neuron():
    """A single neuron holding its output connections/weights and gradient.

    Each neuron owns ``numberOfOutputs`` Connection and OutputWeights objects
    (one per neuron in the next layer).
    """

    def __init__(self, name, numberOfOutputs):
        self.name = name
        # FIX: connections/outputWeights were class-level lists, silently
        # shared by every Neuron instance; they are now per-instance.
        self.connections = []
        self.outputWeights = []
        # FIX: these were class-level ctypes DOUBLE(0) objects (also shared);
        # plain floats behave correctly with arithmetic operators.
        self.alpha = 0.0
        self.eta = 0.0
        self.gradient = 0.0
        self.outputValue = 0.0
        self.numberOfOutputs = int(numberOfOutputs)
        self.idx = self.randomWeight()
        for i in range(self.numberOfOutputs):
            self.connections.append(
                Connection("{0}.cnxn{1}".format(self.name, i)))
            self.outputWeights.append(
                OutputWeights("{0}.ow{1}".format(self.name, i), self.idx))

    def getOutputValue(self):
        """Return this neuron's current output value."""
        return self.outputValue

    def setOutputValue(self, desiredValue):
        """Set this neuron's output value."""
        self.outputValue = desiredValue

    def randomWeight(self):
        """Return a uniform random weight in [0, 1)."""
        return np.random.uniform(0.0, 1.0)

    def sumDOW(self, nextLayer):
        """Sum of (output weight * downstream gradient) over nextLayer."""
        # FIX: accumulator was DOUBLE(0); c_double does not support '+='.
        total = 0.0
        for item in nextLayer:
            total += self.outputWeights[item].weight * nextLayer[item].gradient
        return total

    def transferFunction(self, x):
        """Activation function (tanh)."""
        return np.tanh(x)

    def transferFunctionDerivative(self, x):
        """Derivative of tanh: 1 - tanh(x)^2.

        FIX: this method did not exist; calcOutputGradient called a
        misspelled 'tranferFunctionDerivative', guaranteeing AttributeError.
        """
        return 1.0 - np.tanh(x) ** 2

    def updateInputWeights(self, prevLayer):
        """Apply the delta-weight update to each previous-layer neuron.

        NOTE(review): indexing ``outputWeights[item]`` with a Neuron object
        looks wrong (list indices must be integers); kept as written —
        confirm the intended container types before relying on this.
        """
        for item in prevLayer:
            neuron = item
            oldDeltaWeight = neuron.outputWeights[item].deltaWeight
            newDeltaWeight = (self.eta * neuron.getOutputValue() * self.gradient
                              + self.alpha * oldDeltaWeight)
            neuron.outputWeights[item].deltaWeight = newDeltaWeight
            neuron.outputWeights[item].weight += newDeltaWeight

    def feedForward(self, prevLayer):
        """Sum the output values of the previous layer's neurons.

        FIX: the original computed the sum and discarded it (no return, no
        assignment); the sum is now returned. Callers that ignored the
        previous ``None`` return are unaffected.
        """
        total = 0.0
        for item in prevLayer:
            total += item.getOutputValue()
        return total

    def calcOutputGradient(self, targetValue):
        """Gradient for an output-layer neuron: delta * f'(output)."""
        delta = targetValue - self.outputValue
        # FIX: was the misspelled (nonexistent) 'tranferFunctionDerivative'.
        self.gradient = delta * self.transferFunctionDerivative(self.outputValue)

    def calcHiddenGradients(self, nextLayer):
        """Gradient for a hidden-layer neuron: sumDOW * f'(output)."""
        self.dow = self.sumDOW(nextLayer)
        self.gradient = self.dow * self.transferFunctionDerivative(
            self.outputValue)

    def printAttributes(self):
        """Print a human-readable dump of this neuron's state."""
        cnxns = ""
        for item in self.connections:
            cnxns = "{0}\n\t\tName: {1}\n\t\tWeight: {2}\n\t\tdeltaWeight: {3}\n".format(
                cnxns, item.name, item.weight, item.deltaWeight)
        # FIX: 'ows' was unbound when outputWeights was empty (NameError at
        # the print below); initialize it.
        ows = ""
        i = 0
        for weight in self.outputWeights:
            # NOTE(review): 'weight.outputWeights[i]' indexes an attribute on
            # an OutputWeights element that is not shown in this file —
            # possibly meant 'weight.weight'; kept as written, confirm.
            ows = "Name: {0}\n\t\tWeights: {1}".format(weight.name,
                                                       weight.outputWeights[i])
            i = i + 1
        print(
            "Neuron: {0}\n\tConnections:\n\t\t{1}\n\toutputWeights:\n\t\t{2}\n\teta: {3}\n\tgradient: {4}\n\tidx: {5}\n"
            .format(self.name, cnxns, ows, self.eta, self.gradient, self.idx))

    def main(self, numOutputs, desiredIndex):
        """NOTE(review): C++ transliteration — ints have no push_back/back;
        this method cannot run as written. Kept for interface compatibility;
        rewrite once the intended container type is known."""
        for output in numOutputs:
            output.push_back(Connection())
            output.back().weight = self.randomWeight()
        self.idx = desiredIndex
class NeuralNetwork():
    """Three-layer (input/hidden/output) feed-forward network of Neurons.

    NOTE(review): this class appears to be a partial transliteration from
    C++ — several methods call list methods that do not exist in Python
    (``back``, ``push_back``), reference an attribute ``self.layers`` that is
    never assigned here (only ``self.__layers__`` is), and index lists with
    Neuron objects. These spots are flagged inline; the code is preserved
    as-is pending a decision on intended behavior.
    """

    # NOTE(review): class-level mutable lists are shared by every instance
    # (__init__ rebinds some but appends into others); error fields are
    # ctypes DOUBLE objects that do not support '+='/'/=' arithmetic.
    __name__ = ""
    __trainingSet__ = []
    __topology__ = []
    __numInputs__ = 0
    __inputLayer__ = []
    __numHidden__ = 0
    __hiddenLayer__ = []
    __numOutputs__ = 0
    __outputLayer__ = []
    __numberOfLayers__ = 0
    __layers__ = []
    error = DOUBLE(0)
    recentAverageError = DOUBLE(0)
    recentAverageSmoothingFactor = DOUBLE(0)

    def __init__(self, name, topology, trainingSet):
        """Build input/hidden/output layers from a 3-element topology list.

        Parameters
        ----------
        name : str
        topology : sequence of int
            Neuron counts: [inputs, hidden, outputs].
        trainingSet : training data, stored but not used here.
        """
        self.__name__ = name
        self.__topology__ = topology
        self.__trainingSet__ = trainingSet
        self.__numberOfLayers__ = len(self.__topology__)
        i = 0
        for item in self.__topology__:
            nrons = 0
            if i == 0:
                # NOTE(review): __numHidden__ is still 0 here, so input
                # neurons are created with zero outputs (ordering bug?).
                self.__numInputs__ = item
                while nrons < int(self.__numInputs__):
                    n = Neuron("Input{0}".format(nrons + 1),
                               self.__numHidden__)
                    self.__inputLayer__.append(n)
                    nrons = nrons + 1
                self.__layers__.append(self.__inputLayer__)
            if i == 1:
                # NOTE(review): likewise __numOutputs__ is still 0 here.
                self.__numHidden__ = item
                while nrons < int(self.__numHidden__):
                    n = Neuron("Hidden{0}".format(nrons + 1),
                               self.__numOutputs__)
                    self.__hiddenLayer__.append(n)
                    nrons = nrons + 1
                self.__layers__.append(self.__hiddenLayer__)
            if i == 2:
                self.__numOutputs__ = item
                while nrons < int(self.__numOutputs__):
                    n = Neuron("Output{0}".format(nrons + 1),
                               self.__numHidden__)
                    self.__outputLayer__.append(n)
                    nrons = nrons + 1
                self.__layers__.append(self.__outputLayer__)
            i = i + 1
        # Dump every neuron's state after construction.
        x = 0
        while x < self.__layers__.__len__():
            for nron in self.__layers__[x]:
                nron.printAttributes()
            x = x + 1

    def backPropogate(self, targetValues):
        """Compute network error and propagate gradients backwards.

        NOTE(review): ``self.__outputLayer__.back()`` — Python lists have no
        ``back()`` (AttributeError); ``targetValues[item]`` indexes with a
        Neuron object; ``self.error = pow(delta, 2)`` overwrites instead of
        accumulating; ``self.layers`` is never assigned in this class.
        """
        outputLayer = self.__outputLayer__.back()
        for item in self.__outputLayer__:
            delta = targetValues[item] - self.__outputLayer__[
                item].getOutputValue()
            self.error = pow(delta, 2)
        # RMS of the (last) delta.
        self.error /= len(outputLayer) - 1
        self.error = np.sqrt(self.error)
        # Recent average error (exponential-style smoothing).
        self.recentAverageError = (
            self.recentAverageError * self.recentAverageSmoothingFactor +
            self.error) / (self.recentAverageSmoothingFactor + 1.0)
        # Hidden-layer gradient / weight update pass.
        # NOTE(review): prevLayer is set equal to currentLayer, so each layer
        # updates against itself — presumably should be the previous
        # iteration's layer; confirm intent.
        i = 0
        for item in self.layers:
            currentLayer = item
            if i > 0:
                prevLayer = currentLayer
                currentLayer = item
                for nron in currentLayer:
                    nron.updateInputWeights(prevLayer)
            i = i + 1

    def feedForward(self, inputValues):
        """Feed a CSV sample "in1,in2,target" forward through the network.

        NOTE(review): Neuron.feedForward expects an iterable of neurons but
        receives an int here; ``self.layers`` is never assigned; ``i`` is
        never incremented, so the propagation branch can never run.
        """
        # Network input values
        elements = inputValues.split(",")
        input1 = int(elements[0])
        input2 = int(elements[1])
        outPut = int(elements[2])
        for nron in self.__inputLayer__:
            nron.feedForward(input1)
            nron.feedForward(input2)
        # Network forward propogate
        i = 0
        for layer in self.layers:
            curlayer = layer
            if i > 0:
                prevlayer = curlayer
                curlayer = layer
                self.layers.feedForward(prevlayer)

    def getRecentAverageError(self):
        """Return the smoothed recent average error."""
        return self.recentAverageError

    def getResults(self, resultValues):
        """Collect output-layer results into resultValues.

        NOTE(review): non-functional — rebinds the parameter to a fresh
        list (caller sees nothing), and uses C++ ``back()``/``push_back``
        which do not exist on Python lists.
        """
        resultValues = []
        for layer in self.layers.back().size():
            resultValues.push_back()[layer].getOutputValue()

    def getTopology(self):
        """Return the topology list this network was built from."""
        return self.__topology__
def setPropertyValue(self, iProp, value):
    """Write a double-valued camera property via the DLL.

    Returns whatever status the underlying f_setpropertyvalue call returns.
    """
    prop_id = c_int(iProp)
    prop_val = DOUBLE(value)
    return self.f_setpropertyvalue(self.hcam, prop_id, prop_val)
def getPropertyValue(self, iProp):
    """Read a double-valued camera property and return it as a Python float."""
    result = DOUBLE()
    # The DLL's status code is ignored, matching the original behavior.
    self.f_getpropertyvalue(self.hcam, c_int(iProp), byref(result))
    return result.value
def setExposureTime(self, expTime):
    """Set the camera exposure time; returns the DLL call's status code."""
    exposure = DOUBLE(expTime)
    return self.f_setexposuretime(self.hcam, exposure)