def readZipFile(filename, subscriber=1):
    """Read a zipped collection of parsed FMF files and collate them
    into a single SampleContainer.

    Fields nothing else depends on are treated as independent variables
    and attached as dimensions of the dependent fields.
    """
    data, names = loadDataFromZip(filename, subscriber)
    commonAttr, variableAttr = collectAttributes(data, names)
    # Wrap the per-file (variable) attributes into FieldContainers.
    containers = [column2FieldContainer(longname, column)
                  for longname, column in variableAttr.iteritems()]
    # Collate the fields of the parsed FMF files; independent variables,
    # which are used as dimensions, are skipped below.
    fieldData, dependencies, units, shortnames = unpackAndCollateFields(
        variableAttr, data)
    independentFieldsNames = [
        fieldName
        for fieldName, dependency in dependencies.iteritems()
        if dependency == []
        ]
    for fieldName in independentFieldsNames:
        del dependencies[fieldName]
    # Build the independent fields first ...
    independentFields = {}
    for indepField in independentFieldsNames:
        indepData = checkAndCondense(fieldData[indepField])
        independentFields[indepField] = DataContainer.FieldContainer(
            numpy.array(indepData),
            longname=indepField,
            shortname=shortnames[indepField],
            unit=units[indepField],
            rescale=True)
    # ... then the dependent fields, attaching the independent fields as
    # dimensions.  The i-th dependency maps to the (last - i)-th axis.
    # QUESTION: Can a field depend on a dependent field?
    for field, dependency in dependencies.iteritems():
        newField = DataContainer.FieldContainer(
            numpy.array(fieldData[field]),
            longname=field,
            shortname=shortnames[field],
            unit=units[field],
            rescale=True)
        for i, indepField in enumerate(dependency):
            dim = len(newField.dimensions) - i - 1
            newField.dimensions[dim] = independentFields[indepField]
        assert newField.isValid()
        containers.append(newField)
    # The next lines are a hack and should be dealt with properly:
    # 'creator' must be set on the result object, not kept in the
    # attribute dictionary.
    if u'creator' in commonAttr.keys():
        creator = commonAttr[u'creator']
        del commonAttr[u'creator']
        result = DataContainer.SampleContainer(containers,
                                               attributes=commonAttr)
        result.creator = creator
    else:
        result = DataContainer.SampleContainer(containers,
                                               attributes=commonAttr)
    return result
def compute(self, field, subscriber=1):
    """Compute the distance functionals of every column of *field* and
    return them in a sealed SampleContainer.
    """
    percentage = 0
    columns = [self.computeDistances(column, subscriber, percentage)
               for column in field]
    functionals = DataContainer.SampleContainer(
        columns,
        longname='Functionals of %s' % field.longname,
        shortname='F_{%s}' % field.shortname)
    functionals.seal()
    return functionals
def setUp(self):
    """Build the test fixture: a SampleContainer holding a family of
    quartic double-well intensity curves plus flat white and dark
    reference fields sharing the same position dimension.
    """
    self.n = 100            # number of sample positions
    self.m = 10             # number of parameter values
    self.kappa1 = 0.0       # strength of the linear (tilt) term
    self.errLevelPos = 6    # decimal places for position comparisons
    self.errLevelCurv = 5   # decimal places for curvature comparisons
    self.x = numpy.linspace(-1.5, 1.5, self.n)
    self.lamb = numpy.linspace(-1.0, 1.0, self.m)
    X, LAMB = scipy.meshgrid(self.x, self.lamb)
    lambField = DC.FieldContainer(LAMB, unit='1 V / m**3',
                                  longname='parameter',
                                  shortname='\lambda')
    xField = DC.FieldContainer(X[0], unit='1 m',
                               longname='position',
                               shortname='x')
    # Landau-type potential V(u) = -lambda/2 u^2 + u^4/4 - kappa1 * u
    V = []
    for i in xrange(len(lambField.data)):
        u = xField.data
        V.append(-lambField.data[i] / 2 * u ** 2 + u ** 4 / 4
                 - u * self.kappa1)
    self.I = DC.FieldContainer(numpy.array(V),
                               longname='intensity',
                               shortname='I')
    self.I.dimensions[-1] = xField
    self.I0 = DC.FieldContainer(numpy.ones(self.I.data.shape, 'float'),
                                longname='white reference',
                                shortname='I_0')
    self.I0.dimensions[-1] = xField
    # BUGFIX: longname was misspelled 'darf reference'.
    self.Id = DC.FieldContainer(numpy.zeros(self.I.data.shape, 'float'),
                                longname='dark reference',
                                shortname='I_d')
    self.Id.dimensions[-1] = xField
    self.sampleC = DC.SampleContainer([self.I, self.I0, self.Id])
def readDataFile(filename):
    """Parse a single FMF data file into a sealed container.

    Returns the lone container directly if the file holds exactly one
    table; otherwise wraps all tables in a 'List of tables'
    SampleContainer carrying a copy of the first table's attributes.
    """
    # BUGFIX: use a context manager so the file handle is closed even
    # if read() raises (the original leaked the handle on error).
    with open(filename, 'r') as filehandle:
        dat = filehandle.read()
    rawContainer = readSingleFile(dat, filename)
    if len(rawContainer) == 1:
        container = rawContainer[0]
        container.seal()
        return container
    newSample = DataContainer.SampleContainer(
        rawContainer,
        longname='List of tables',
        shortname='L',
        attributes=copy.deepcopy(rawContainer[0].attributes))
    newSample.seal()
    return newSample
def loadOscFromFile(filename, subscriber=0):
    """Load an OSC measurement file -- zipped or plain text -- and
    return its columns as a SampleContainer.
    """
    # Prefer the zip reader; fall back to plain parsing for non-zip files.
    try:
        data, dataSections = readZipFile(filename, subscriber=subscriber)
    except zipfile.BadZipfile:
        data, dataSections = readDataFile(filename)
    container = constructTemplate(data, dataSections)
    # Distribute every measurement's values onto the template columns.
    for d in data:
        for dicname in dataSections:
            for k in d[dicname].keys():
                container[k].append(d[dicname][k])
        for i, col in enumerate(d[u'SPALTENBESCHRIFTUNG']):
            container[col].append(d[u'MESSDATEN'][:, i])
    cols = [createFieldContainer(k, v) for k, v in container.iteritems()]
    # Use the comment column as the table title when present.
    title = container[u'KOMMENTAR'][0] if container.has_key('KOMMENTAR') \
            else ''
    return DataContainer.SampleContainer(cols, longname=title)
def documentCovering(self, image, subscriber=0):
    """Return a sealed SampleContainer with the covering of the image
    parts defined by the lower/upper threshold parameters.

    The threshold field holds (lower, upper, max(image)) so the covering
    vector from getCovering() can be related to its threshold bins.
    """
    thresholds = scipy.array([self.paramLowerThreshold.value,
                              self.paramUpperThreshold.value,
                              scipy.amax(image.data)])
    coveringVec = self.getCovering(image)
    # BUGFIX: shortname was '\theta', in which '\t' is a TAB escape;
    # the raw string keeps the intended LaTeX macro.
    theta = DataContainer.FieldContainer(
        thresholds, '1',
        longname='Value of upper threshold',
        shortname=r'\theta')
    A = DataContainer.FieldContainer(
        coveringVec, '1',
        longname='Covering',
        shortname='A')
    # (Removed leftover debug 'print' statements.)
    res = DataContainer.SampleContainer([theta, A],
                                        u"Covering of image parts",
                                        u"X_A")
    res.seal()
    return res
newField = reshapeField(field) except TypeError: raise if field.data.dtype.name.startswith('string'): _logger.warning('Warning: Cannot reshape numpy.array \ of string: %s' % field) newField = field else: _logger.error('Error: Cannot reshape numpy.array: %s' % field) import sys sys.exit(0) reshapedFields.append(newField) if shortname == None: shortname = 'T' return DataContainer.SampleContainer(reshapedFields, longname=longname, shortname=shortname) def preParseData(b): localVar = {'fmf-version': '1.1', 'coding': 'utf-8', 'delimiter': '\t'} commentChar = ';' if b.startswith(codecs.BOM_UTF8): b = b.lstrip(codecs.BOM_UTF8) if b[0] == ';' or b[0] == '#': commentChar = b[0] items = [ var.strip().split(':') for var in b.split('-*-')[1].split(';') ] try: for key, value in items:
def mra(self, field, subscriber=0):
    """Multi-resolution analysis: locate the local minima and maxima of
    *field* along its last dimension and return their positions (with
    errors) as a sealed SampleContainer.
    """
    dim = field.dimensions[-1]
    # The scale parameter may be a physical quantity string or a plain
    # number.  BUGFIX: was a bare 'except:', which also swallowed
    # SystemExit and KeyboardInterrupt.
    try:
        scale = quantities.Quantity(self.paramScale.value.encode('utf-8'))
    except Exception:
        scale = float(self.paramScale.value)
    numb_edge = 100.0 / self.paramNumb_edge.value
    d = scipy.diff(dim.data)
    # mra1d assumes an equidistant grid; verify before proceeding.
    numpy.testing.assert_array_almost_equal(d.min(), d.max(), 4)
    sigmaMax = scale / (d[0] * dim.unit)
    if len(field.data.shape) > 1:
        # Multi-row field: analyse each 1d row, collecting results.
        p_e = []
        inc = 100. / len(field.data)
        acc = 0.
        for field1d in field:
            try:
                p_e.append(mra1d(dim, field1d, sigmaMax, numb_edge))
            except MraError:
                # No extrema found in this row; record empty results.
                p_e.append((([], []), ([], [])))
            acc += inc
            subscriber %= acc  # progress feedback
        minima, maxima = zip(*p_e)
        n_min, pos_min, err_min = pos_error_to_data_container(minima)
        n_max, pos_max, err_max = pos_error_to_data_container(maxima)
        dims_min = [DataContainer.generateIndex(0, n_min),
                    field.dimensions[0]]
        dims_max = [DataContainer.generateIndex(0, n_max),
                    field.dimensions[0]]
    else:
        (pos_min, err_min), (pos_max, err_max) = mra1d(dim, field,
                                                       sigmaMax,
                                                       numb_edge)
        dims_min = [DataContainer.generateIndex(0, len(pos_min))]
        dims_max = [DataContainer.generateIndex(0, len(pos_max))]
    subscriber %= 100.
    minima = DataContainer.FieldContainer(
        pos_min.transpose(),
        error=err_min.transpose(),
        unit=dim.unit,
        dimensions=dims_min,
        mask=numpy.isnan(pos_min).transpose(),
        longname="%s of the local %s of %s" % (dim.longname, "minima",
                                               field.longname),
        shortname="%s_{min}" % dim.shortname)
    maxima = DataContainer.FieldContainer(
        pos_max.transpose(),
        error=err_max.transpose(),
        unit=dim.unit,
        dimensions=dims_max,
        mask=numpy.isnan(pos_max).transpose(),
        longname="%s of the local %s of %s" % (dim.longname, "maxima",
                                               field.longname),
        shortname="%s_{max}" % dim.shortname)
    roots = DataContainer.SampleContainer(
        [minima, maxima],
        longname="%s of the local %s of %s" % (dim.longname, "extrema",
                                               field.longname),
        shortname="%s_{extrem}" % dim.shortname)
    # User-supplied names override the generated defaults.
    if self.paramLongname.value != 'default':
        roots.longname = self.paramLongname.value
    if self.paramSymbol.value != 'default':
        roots.shortname = self.paramSymbol.value
    roots.seal()
    return roots
def findUltimatePoints(self, image, subscriber=0):
    """Find the local maxima ('ultimate points') of a distance-transform
    image and return their coordinates and values in a sealed
    SampleContainer.

    Refactored: the former copy-pasted corner/edge/inner scans all
    perform the same clipped-3x3-neighbourhood maximum test, now
    factored into one helper; the visiting order of the original
    implementation (corners, upper, lower, left, right edges, inner
    pixels) is preserved so the output ordering is unchanged.
    """
    img = image.data
    nx, ny = img.shape

    def isLocalMax(x, y):
        # True if the pixel equals the maximum of its boundary-clipped
        # 3x3 neighbourhood.  Upper slice bounds beyond the array shape
        # are clipped automatically by numpy slicing.
        window = img[max(0, x - 1):x + 2, max(0, y - 1):y + 2]
        return img[x, y] == scipy.amax(window)

    ultimatePoints = []
    # Corners:
    for x, y in [(0, 0), (0, ny - 1), (nx - 1, 0), (nx - 1, ny - 1)]:
        if isLocalMax(x, y):
            ultimatePoints.append((x, y, img[x, y]))
    # Upper edge:
    for x in xrange(1, nx - 1):
        if isLocalMax(x, 0):
            ultimatePoints.append((x, 0, img[x, 0]))
    # Lower edge:
    for x in xrange(1, nx - 1):
        if isLocalMax(x, ny - 1):
            ultimatePoints.append((x, ny - 1, img[x, ny - 1]))
    # Left edge:
    for y in xrange(1, ny - 1):
        if isLocalMax(0, y):
            ultimatePoints.append((0, y, img[0, y]))
    # Right edge:
    for y in xrange(1, ny - 1):
        if isLocalMax(nx - 1, y):
            ultimatePoints.append((nx - 1, y, img[nx - 1, y]))
    # Inner image:
    for y in xrange(1, ny - 1):
        for x in xrange(1, nx - 1):
            if isLocalMax(x, y):
                ultimatePoints.append((x, y, img[x, y]))
    # Discard background points (value 0).  NOTE(review): an image with
    # no foreground maxima leaves this array empty and the column
    # indexing below fails -- unchanged from the original behaviour.
    ultimatePoints = scipy.array([p for p in ultimatePoints if p[2] > 0])
    x = DataContainer.FieldContainer(
        ultimatePoints[:, 0],
        image.dimensions[0].unit,
        longname=image.dimensions[0].longname,
        shortname=image.dimensions[0].shortname)
    y = DataContainer.FieldContainer(
        ultimatePoints[:, 1],
        image.dimensions[1].unit,
        longname=image.dimensions[1].longname,
        shortname=image.dimensions[1].shortname)
    z = DataContainer.FieldContainer(
        ultimatePoints[:, 2],
        image.unit,
        longname=u"Distance to background",
        shortname=u"d")
    x.seal()
    y.seal()
    z.seal()
    return DataContainer.SampleContainer(
        [x, y, z],
        u"Ultimate points from %s" % (image.longname),
        u"D")