def test_vanishing_moments(self):
    """Check the vanishing-moment conditions for every Daubechies filter.

    For each filter length D a linear system A*lp = b is assembled whose
    first row expresses the sum-to-sqrt(2) condition and whose remaining
    rows encode the vanishing moments; the low-pass coefficients lp must
    satisfy it.
    """
    from daubfilt import daubfilt, number_of_filters

    for index in range(number_of_filters):
        D = 2*(index+1)      # Filter length
        P = D//2             # Number of vanishing moments
        N = P-1              # Dimension of nullspace of the matrix A
        R = P+1              # Rank of A, R = D-N = P+1 equations

        lp, hp = daubfilt(D)

        # The condition number of A grows with P, so only the first 6
        # moment conditions are tested (and the allclose tolerance is
        # slightly larger than machine precision).
        A = zeros((R, D), Float)     # D unknowns, R equations
        b = zeros((R, 1), Float)     # Right hand side
        b[0] = sqrt(2)
        A[0, :] = ones(D, Float)     # Coefficients must sum to sqrt(2)

        for p in range(min(P, 6)):
            # Row p+1 encodes the p'th vanishing moment (Cond Ap)
            for k in range(D):
                sign = (-1)**(D-k)
                A[p+1, k] = sign * k**p

        assert allclose(b, mvmul(A, lp))
def test_conservation_of_area(self):
    """Verify the dilation equation: each filter's coefficients sum to sqrt(2).
    """
    from daubfilt import daubfilt, number_of_filters

    for index in range(number_of_filters):
        length = 2*(index+1)
        lp, hp = daubfilt(length)

        deviation = abs(sum(lp) - sqrt(2))
        assert allclose(deviation, 0), 'Error == %e' %deviation
def test_cpt_interpolation(self):
    """Check colour lookups at segment boundaries and interior points."""
    cpt = CPT(test_filename)

    # (input value, expected interpolated colour)
    expectations = [(0, 0),
                    (3, 85),
                    (6, 170),
                    (9, 255),
                    (1, 28.3333333333),
                    (1.5, 42.49999999),
                    (4.5, 85+42.49999999)]

    for value, expected_colour in expectations:
        assert allclose(cpt.get_color(value), expected_colour)
def test_orthogonality(self):
    """Test that coefficients in lp satisfy the orthogonality conditions.

    For each Daubechies filter, the inner product of the low-pass
    coefficients with an even-shifted copy of themselves (shift
    k = 1 .. N, as computed by ortho) must vanish.
    """
    from daubfilt import daubfilt, number_of_filters

    for p in range(number_of_filters):
        D = 2*(p+1)   # Filter length
        P = D/2       # Number of vanishing moments
        N = P-1       # Dimension of nullspace of the matrix A

        lp, hp = daubfilt(D)

        for k in range(1, N+1):  #FIXME: use P
            # ortho(k, lp) should be (close to) zero for every shift k >= 1.
            # The original code carried an unreachable 'if k==0: o = 1-o'
            # branch (k starts at 1), which has been removed.
            err = abs(ortho(k, lp))
            assert allclose(err, 0), 'Error == %e' %err
def generate_one_colour_style(point, min_amp, max_amp, cpt, colour_override=None, transparency='AA'):
    """Build one KML <Style> element coloured according to amplitude.

    point: sequence whose third entry is the amplitude value
    colour_override: Use this colour irrespective of amplitude
    transparency: 00 is tranparent, FF is solid. Default = 'AA'
    """
    amp = float(point[2])

    if colour_override is not None:
        # Caller supplied a fixed colour; the amplitude plays no role.
        colour = colour_override
        style_id = 'override'
    else:
        style_id = '%.3f' %amp
        if allclose(min_amp, max_amp):
            # Degenerate range: all amplitudes are equal, fall back to greys.
            if allclose(min_amp, 0.0):
                colour = '333333' # Dark Grey
            else:
                colour = '999999' # Grey
        else:
            fraction = (amp-min_amp)/(max_amp-min_amp)  # In [0,1]
            fraction = math.sqrt(fraction)              # Bias upwards
            red, green, blue = cpt.get_color(fraction)
            # KML expects channels in blue-green-red order.
            channels = [float2hexstring(c) for c in (blue, green, red)]
            colour = ''.join(channels).upper()

    kml = """
<Style id="%s">
  <LineStyle>
    <color>FF%s</color>
    <width>0.4</width>
  </LineStyle>
  <PolyStyle>
    <outline>1</outline>
    <fill>1</fill>
    <color>%s%s</color>
  </PolyStyle>
  <IconStyle>
    <scale>0.1</scale>
    <Icon><href>1_pixel_white.png</href></Icon>
  </IconStyle>
  <BalloonStyle>
    <color>FFA56A</color>
  </BalloonStyle>
</Style>
""" %(style_id, colour, transparency, colour)

    return kml
def test_that_cpt_can_be_read(self):
    """Verify all fields parsed from the test CPT file."""
    cpt = CPT(test_filename)

    # Global colours and colour model
    assert allclose(cpt.background_color, [0,0,0])
    assert allclose(cpt.foreground_color, [255,255,255])
    assert allclose(cpt.nan_color, [128,128,128])
    assert cpt.color_model == 'RGB'

    # Per-segment expectations:
    # (lower bound, upper bound, rgb_min, rgb_dif, boundary flag)
    expectations = [(0, 3, [0,0,0], [85,85,85], 'L'),
                    (3, 6, [85,85,85], [170-85,170-85,170-85], ''),
                    (6, 9, [170,170,170], [85,85,85], 'U')]

    for segment, (low, high, rgb_min, rgb_dif, flag) in zip(cpt.segments, expectations):
        assert allclose(segment.lower_bound, low)
        assert allclose(segment.upper_bound, high)
        assert allclose(segment.rgb_min, rgb_min)
        assert allclose(segment.rgb_dif, rgb_dif)
        assert segment.color_segment_boundary == flag
def test_remapping(self):
    """Test that threshold values can be rescaled
    """
    cpt = CPT(test_filename)

    # Colours and boundary flags must survive any remapping:
    # (rgb_min, rgb_dif, boundary flag) for segments 0, 1, 2
    colour_expectations = [([0,0,0], [85,85,85], 'L'),
                           ([85,85,85], [170-85,170-85,170-85], ''),
                           ([170,170,170], [85,85,85], 'U')]

    def check_colours_unchanged():
        for segment, (rgb_min, rgb_dif, flag) in zip(cpt.segments, colour_expectations):
            assert allclose(segment.rgb_min, rgb_min)
            assert allclose(segment.rgb_dif, rgb_dif)
            assert segment.color_segment_boundary == flag

    def check_bounds(expected_bounds):
        for segment, (low, high) in zip(cpt.segments, expected_bounds):
            assert allclose(segment.lower_bound, low)
            assert allclose(segment.upper_bound, high)

    # Normalising maps the thresholds onto [0, 1]
    cpt.normalise()
    check_bounds([(0, 1./3), (1./3, 2./3), (2./3, 1.0)])
    check_colours_unchanged()

    # Rescaling maps the thresholds onto the requested interval
    cpt.rescale(-10, 20)
    check_bounds([(-10, 0), (0, 10), (10, 20)])
    check_colours_unchanged()
def _baumWelsh(self,obsIndices,maxiter,scale_every=50):
    """Uses Baum-Welsh algorithm to learn the probabilities

    Scaling on the forward and backward values is automatically
    performed when numerical problems (underflow) are encountered.
    Each iteration prints a dot on stderr, or a star if scaling
    was applied"""
    # Local aliases of the model parameters; updated in lock-step with
    # self.A / self.B / self.pi at the end of each iteration.
    B = self.B
    A = self.A
    pi= self.pi
    apply_scaling=0   # When truthy, use the scaled forward pass this iteration
    for iter in xrange(1,maxiter+1):
        # Iteration counter goes to stdout; progress dots/stars go to stderr
        print '(%s/%s),'% (iter, maxiter)
        # Force a scaled pass every scale_every iterations as a precaution
        if not (iter%scale_every):
            apply_scaling=1
        try:
            if apply_scaling:
                stderr.write('*')
                alpha,scale_factors = self._alpha_scaled(obsIndices)
                # NOTE(review): the flag is cleared here, before _beta/_ksi
                # run; if those overflow, the except branch below takes the
                # non-scaling path — confirm this is intended.
                apply_scaling = 0
            else:
                stderr.write('.')
                alpha = self._alpha(obsIndices)
                scale_factors = None
            stderr.flush()
            beta = self._beta(obsIndices,scale_factors)
            ksi = self._ksi(obsIndices,alpha,beta)
        except OverflowError:
            if apply_scaling:
                # we have overflown even though scaling was applied
                # There's nothing much we can do, so we abort. :o(
                from traceback import print_exc
                print_exc()
                print "There's nothing much we can do, so we abort. :o("
                raise
            else:
                # we have overflown: we process the exception by
                # decreasing scale_every, setting apply_scaling to TRUE
                # and restarting the loop
                scale_every = int(floor(scale_every/1.5))
                apply_scaling = 1
                continue
        gamma = self._gamma(ksi)
        # Sum of gamma over time; shared denominator of the re-estimates
        sigma_gamma = reduce(add,gamma)
        # (40a) and (40b): presumably the equation numbers of Rabiner's
        # HMM tutorial — verify against the reference used by this module.
        pi_bar = gamma[0]                      # (40a)
        A_bar = reduce(add,ksi)/sigma_gamma    # (40b)
        B_bar = zeros((self.M,self.N),Float)
        # sort things out
        # Accumulate gamma[i] into the B_bar row for observation obsIndices[i].
        # setdefault returns the stored row, which is a mutable view into
        # B_bar, so the in-place += below updates B_bar directly.  The last
        # observation is skipped (range stops at len(obsIndices)-1).
        sorter = {}
        for i in range(len(obsIndices)-1):
            partial = sorter.setdefault(obsIndices[i],B_bar[obsIndices[i]])
            partial += gamma[i]
        B_bar /= sigma_gamma
        # Stop as soon as the re-estimated parameters stop moving
        if allclose(A, A_bar) and allclose(B,B_bar) and \
           allclose(pi,pi_bar):
            print 'Converged in %d iterations'%iter
            break
        else:
            # Commit the re-estimated parameters and keep the local aliases
            # in sync for the next iteration's convergence test.
            self.A = A = A_bar
            self.B = B = B_bar
            self.pi = pi = pi_bar
    else:
        # for/else: only reached when the loop ran to maxiter without break
        print "The Baum-Welsh algorithm had not converged in %d iterations"%maxiter
def multiple_learn(self, m_observations, states=None, maxiter=10000, scale_every=50):
    """Uses Baum-Welsh algorithm to learn the probabilities on multiple
    observations sequences
    """
    K=len(m_observations)           # Number of observation sequences
    # Per-sequence scaling state: each sequence keeps its own flag and
    # its own (shrinking) forced-scaling period.
    apply_scaling = [0]*K
    scale_every = [scale_every]*K
    all_scaling = 0                 # Set after one iteration where every sequence overflowed
    B = self.B
    A = self.A
    pi= self.pi
    # NOTE(review): epsilon_v is only referenced by the commented-out
    # zero-replacement line below — currently unused.
    epsilon_v = EPSILON*self.N
    for iter in xrange(1,maxiter+1):
        print '(%s/%s),'% (iter, maxiter)
        # Accumulators for the numerators/denominator summed over sequences
        A_bar = zeros((self.N,self.N),Float)
        B_bar = zeros((self.M,self.N),Float)
        sigma_gamma = zeros((self.N,),Float)
        ok = 0                      # Did at least one sequence contribute this iteration?
        for k in range(K):
            observations = m_observations[k]
            # FIXME cache obsIndices ?
            obsIndices = self._getObservationIndices(observations)
            # Force a scaled pass for this sequence every scale_every[k] iterations
            if not (iter%scale_every[k]):
                apply_scaling[k] = 1
            try:
                if apply_scaling[k]:
                    stderr.write('*')
                    alpha,scale_factors = self._alpha_scaled(obsIndices)
                    apply_scaling[k] = 0
                else:
                    stderr.write('.')
                    alpha = self._alpha(obsIndices)
                    scale_factors = None
                stderr.flush()
                beta = self._beta(obsIndices,scale_factors)
                ksi = self._ksi(obsIndices,alpha,beta)
            except OverflowError:
                from traceback import print_exc
                if apply_scaling[k]:
                    # we have overflown even though scaling was applied,
                    # hope the next is better
                    stderr.write('f')
                    continue
                else:
                    # we have overflown: we process the exception by
                    # decreasing scale_every, setting apply_scaling to TRUE
                    # and restarting the loop
                    if scale_every[k] >= 1.5:
                        scale_every[k] = int(floor(scale_every[k]/1.5))
                    apply_scaling[k] = 1
                    stderr.write('o')
                    #print_exc()
                    continue
            stderr.write('+')
            gamma = self._gamma(ksi)
            sigma_gamma_k = reduce(add,gamma)
            # NOTE(review): pi_bar is overwritten for each sequence, so the
            # convergence test below only sees the last processed one —
            # confirm this is intended.
            pi_bar = gamma[0]
            A_bar_k = reduce(add, ksi)
            B_bar_k = zeros((self.M,self.N),Float)
            # sort things out
            # Accumulate gamma[i] into the B_bar_k row for obsIndices[i];
            # setdefault returns the stored row view, so += mutates B_bar_k
            # in place.  The last observation is skipped.
            sorter = {}
            for i in range(len(obsIndices)-1):
                partial = sorter.setdefault(obsIndices[i],B_bar_k[obsIndices[i]])
                partial += gamma[i]
            # add temp results
            # (109)/(110): presumably equation numbers of the reference
            # HMM paper used by this module — verify.
            sigma_gamma = add(sigma_gamma, sigma_gamma_k)  # (109) and (110) denominator
            A_bar = add(A_bar, A_bar_k)                    # (109) numerator
            B_bar = add(B_bar, B_bar_k)                    # (110) numerator
            ok = 1
        stderr.write('\n')
        if not ok:
            # we have overflown in all sequences
            # NOTE(review): all_scaling is toggled here but never forces
            # apply_scaling[k] on; the first total failure retries, the
            # second aborts — verify this matches the original intent.
            if not all_scaling:
                all_scaling = 1
                continue
            # and this is the second time
            else:
                raise Exception("There's nothing much we can do, so we abort. :o(")
        all_scaling = 0
        # replace 0 with epsilon ?
        # sigma_gamma = where(sigma_gamma, sigma_gamma, epsilon_v)
        A_bar /= sigma_gamma   # (109)
        B_bar /= sigma_gamma   # (110)
        #print A_bar
        #print B_bar
        # Stop once the re-estimated parameters stop moving
        if allclose(A, A_bar) and allclose(B,B_bar) and \
           allclose(pi,pi_bar):
            print 'Converged in %d iterations'%iter
            break
        else:
            # Commit the re-estimates and keep local aliases in sync
            self.A = A = A_bar
            self.B = B = B_bar
            self.pi = pi = pi_bar
    else:
        # for/else: only reached when the loop ran to maxiter without break
        print "The Baum-Welsh algorithm had not converged in %d iterations"%maxiter
#Various introspective tricks print B.iscontiguous() #Do it again C=foo(B) # even if a is proper-contiguous # and has proper type, a copy is made # forced by intent(copy) attribute # to preserve its original contents print print 'Result 2' print B #Original array is preserved print C assert not allclose(B, C), 'B and C should have been different' #Do it again with overwrite False C=foo(B, overwrite_a=False) print print 'Result 2' print B #Original array is preserved print C assert not allclose(B, C), 'B and C should have been different' #Do it again but this time overwrite original array C=foo(B, overwrite_a=True)