def tempo_search(db, Key, tempo):
    """
    ::

        Static tempo-invariant search
        Returns search results for query resampled over a range of tempos.
    """
    if not db.configCheck():
        print("Failed configCheck in query spec.")
        print(db.configQuery)
        return None
    prop = 1. / tempo  # the proportion of original samples required for new tempo
    qconf = db.configQuery.copy()
    X = db.retrieve_datum(Key)
    P = db.retrieve_datum(Key, powers=True)
    X_m = pylab.mat(X.mean(0))
    X_resamp = pylab.array(
        adb.resample_vector(X - pylab.mat(pylab.ones(X.shape[0])).T * X_m, prop))
    X_resamp += pylab.mat(pylab.ones(X_resamp.shape[0])).T * X_m
    P_resamp = pylab.array(adb.resample_vector(P, prop))
    seqStart = int(pylab.around(qconf['seqStart'] * prop))
    qconf['seqStart'] = seqStart
    seqLength = int(pylab.around(qconf['seqLength'] * prop))
    qconf['seqLength'] = seqLength
    tmpconf = db.configQuery
    db.configQuery = qconf
    res = db.query_data(featData=X_resamp, powerData=P_resamp)
    res_resorted = adb.sort_search_result(res.rawData)
    db.configQuery = tmpconf
    return res_resorted
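# Hypothetical usage sketch (not part of the original module): run a tempo-invariant
# query against an already-configured audiodb instance. `db` and `key` are assumed to
# come from the caller's audiodb setup (db.configQuery must already be valid).
def _demo_tempo_search(db, key):
    # search for the stored item as if replayed 10% faster than the original
    return tempo_search(db, Key=key, tempo=1.1)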
def _istftm(self,
            X_hat=None,
            Phi_hat=None,
            pvoc=False,
            usewin=True,
            resamp=None):
    """
    ::

        Inverse short-time Fourier transform magnitude. Make a signal from a |STFT| transform.
        Uses phases from self.STFT if Phi_hat is None.

        Inputs:
            X_hat - N/2+1 magnitude STFT [None=abs(self.STFT)]
            Phi_hat - N/2+1 phase STFT [None=angle(self.STFT)]
            pvoc - whether to use phase vocoder [False]
            usewin - whether to use overlap-add [True]
            resamp - factor by which to resample the synthesis window [None]

        Returns:
            x_hat - estimated signal
    """
    if not self._have_stft:
        return None
    X_hat = self.X if X_hat is None else P.np.abs(X_hat)
    if pvoc:
        self._pvoc(X_hat, Phi_hat, pvoc)
    else:
        Phi_hat = P.angle(self.STFT) if Phi_hat is None else Phi_hat
        self.X_hat = X_hat * P.exp(1j * Phi_hat)
    if usewin:
        if self.win is None:
            self.win = P.ones(self.wfft) if self.window == 'rect' else P.np.sqrt(
                P.hanning(self.wfft))
        if len(self.win) != self.nfft:
            self.win = P.r_[self.win, P.np.zeros(self.nfft - self.wfft)]
        if len(self.win) != self.nfft:
            raise error.BregmanError(
                "features_base.Features._istftm(): assertion failed len(self.win)==self.nfft")
    else:
        self.win = P.ones(self.nfft)
    if resamp:
        self.win = sig.resample(self.win, int(P.np.round(self.nfft * resamp)))
    fp = self._check_feature_params()
    self.x_hat = self._overlap_add(P.real(P.irfft(self.X_hat.T)),
                                   usewin=usewin,
                                   resamp=resamp)
    if self.verbosity:
        print("Extracted iSTFTM->self.x_hat")
    return self.x_hat
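# Hypothetical usage sketch (not part of the original class): resynthesize audio from a
# Bregman Features-style object `feat` whose STFT has already been extracted
# (i.e. feat._have_stft is True).
def _demo_istftm(feat):
    x_orig_phase = feat._istftm()        # reuse the analysis phases from feat.STFT
    x_pvoc = feat._istftm(pvoc=True)     # estimate phases with the phase vocoder instead
    return x_orig_phase, x_pvoc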
def evaluate(self, seq_length=None, query_duration=None, tempo=1.0, gt_only=True):
    """
    ::

        Evaluate loop over ground truth:
            query_duration varies with respect to tempo:
                query_duration - fractional seconds (requires adb.delta_time)
                OR
                seq_length - integer length of query sequence
            gt_only - if True, return only ground-truth results,
                otherwise return full database results
    """
    if not tempo:
        tempo = 1.0
    seq_length = self.set_seq_length(seq_length, query_duration)
    lzt_keys, lzt_lengths = self.get_adb_lists()
    ranks = pylab.ones((len(self.ground_truth), len(lzt_keys))) * float('inf')
    dists = pylab.ones((len(self.ground_truth), len(lzt_keys))) * float('inf')
    gt_list, gt_orig = self.initialize_search(seq_length, tempo)
    gt_orig_keys, gt_orig_lengths = zip(*gt_orig)
    gt_keys, gt_lengths = zip(*gt_list)
    # Loop over ground-truth keys
    self.adb.configQuery['seqLength'] = seq_length
    for i, q in enumerate(gt_keys):
        # Search
        if tempo == 1.0:
            res = self.adb.query(key=q).rawData
        else:
            res = audiodb.adb.tempo_search(db=self.adb, Key=q, tempo=tempo)
        r_keys, r_dists, q_pos, r_pos = zip(*res)
        q_idx = gt_orig_keys.index(q)
        for r_idx, s in enumerate(lzt_keys):
            try:
                k = r_keys.index(s)
                ranks[q_idx][r_idx] = k
                dists[q_idx][r_idx] = r_dists[k]
            except ValueError:
                # print("Warning: adb key", s, "not found in result.")
                pass
    self.ranks = ranks
    self.dists = dists
    if gt_only:
        ranks, dists = self.reduce_evaluation_to_gt(ranks,
                                                    dists,
                                                    query_duration=query_duration)
    return ranks, dists
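# Hypothetical usage sketch (not part of the original class): evaluate retrieval ranks
# over the ground truth on an evaluator instance `ev` that wraps an audiodb database.
def _demo_evaluate(ev):
    # 10-second queries, played back 5% faster; return ground-truth rows only
    ranks, dists = ev.evaluate(query_duration=10.0, tempo=1.05, gt_only=True)
    return ranks, dists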
def _stft(self):
    if not self._have_x:
        print("Error: You need to load a sound file first: use self.load_audio('filename.wav')")
        return False
    fp = self._check_feature_params()
    num_frames = len(self.x)
    self.STFT = P.zeros((self.nfft // 2 + 1, num_frames), dtype='complex')
    self.win = P.ones(self.wfft) if self.window == 'rect' else P.np.sqrt(
        P.hanning(self.wfft))
    x = P.zeros(self.wfft)
    buf_frames = 0
    for k, nex in enumerate(self.x):
        x = self._shift_insert(x, nex, self.nhop)
        if self.nhop >= self.wfft - k * self.nhop:
            # align buffer on start of audio
            self.STFT[:, k - buf_frames] = P.rfft(self.win * x, self.nfft).T
        else:
            buf_frames += 1
    self.STFT = self.STFT / self.nfft
    self._fftfrqs = P.arange(0, self.nfft // 2 + 1) * self.sample_rate / float(self.nfft)
    self._have_stft = True
    if self.verbosity:
        print("Extracted STFT: nfft=%d, hop=%d" % (self.nfft, self.nhop))
    self.inverse = self._istftm
    self.X = abs(self.STFT)
    if not self.magnitude:
        self.X = self.X ** 2
    return True
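# Hypothetical usage sketch (not part of the original class): extract the STFT on a
# Features-style object `feat` after audio has been loaded with feat.load_audio(...).
def _demo_stft(feat):
    if feat._stft():
        return feat.X  # magnitude (or power) spectrogram, shape (nfft//2+1, num_frames)
    return None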
def testfunction(data):
    # N-D Gaussian or N-D Runge function
    N, sd = data.shape
    f = ones((N, 1))
    for i in range(sd):
        # f = f * array([exp(-15 * (data[:, i] - 0.5) ** 2)]).T   # Gaussian variant
        f = f * array([1. / (1 + (5 * data[:, i]) ** 2)]).T        # Runge variant
    return f
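# Hypothetical usage sketch (not part of the original module): evaluate the N-D Runge
# test function on 100 random 2-D sample points.
def _demo_testfunction():
    import numpy as np
    pts = np.random.rand(100, 2)      # 100 samples in 2 dimensions
    return testfunction(pts)          # column vector of shape (100, 1)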
def clarinet_funct(attack, sustain, release, sampling_frecuenciy, A0, I0):
    # Build the two envelopes y1 -> A(t) and y2 -> I(t) from attack/sustain/release
    # durations (in seconds) and the sampling frequency.
    ta = pyl.arange(0, attack - 1 / sampling_frecuenciy, 1 / sampling_frecuenciy)
    y1 = pyl.exp(ta / attack * 1.5) - 1
    y1 = y1 / max(y1)
    y1 = np.append(y1, pyl.ones([1, round(sustain * sampling_frecuenciy)]))
    tr = pyl.arange(0, (release / 2 - 1 / sampling_frecuenciy), 1 / sampling_frecuenciy)
    y3 = pyl.exp((release / 2 - tr) / release * 3) - 1
    y4 = pyl.ones(len(y3)) - y3[::-1]
    y3 = y3 / max(y3) / 2
    y2 = np.append(y1, pyl.ones([1, round(release * sampling_frecuenciy)]))
    y1 = np.append(y1, y4)
    y1 = np.append(y1, y3)
    length = min(len(y1), len(y2))
    y1 = y1[:length]
    y2 = y2[:length]
    y2 = -I0 * y2 + 4
    y1 = A0 * y1
    return [y1, y2]  # [A(t), I(t)]
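# Hypothetical usage sketch (not part of the original module): 50 ms attack, 200 ms
# sustain and 100 ms release at 44.1 kHz; the A0 and I0 values are illustrative only.
def _demo_clarinet_funct():
    A_t, I_t = clarinet_funct(attack=0.05, sustain=0.2, release=0.1,
                              sampling_frecuenciy=44100, A0=1.0, I0=2.0)
    return A_t, I_t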
def close_points(X, s=1):
    lambda_s = 1
    lambda_c = s
    X = P.array(X)
    K, N = X.shape
    M = P.zeros((N, N))
    M[range(N), range(N)] = 2 * lambda_c / (N - 1) + lambda_s / N
    # M[0,0] -= lambda_c/(N-1)
    # M[-1,-1] -= lambda_c/(N-1)
    d = P.diag(P.ones(N - 1), 1)
    M = M - lambda_c * (d + d.T) / (N - 1)
    M[0, 0] = lambda_s / N
    M[-1, -1] = lambda_s / N
    M[0, 1] = 0
    M[-1, -2] = 0
    Mi = P.pinv(M)
    smooth_X = (lambda_s / N) * Mi.dot(X.T).T
    return smooth_X
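# Hypothetical usage sketch (not part of the original module): smooth a noisy 2-D
# polyline; the input has shape (K, N) = (coordinates, points) and the output keeps it.
def _demo_close_points():
    import numpy as np
    t = np.linspace(0, 1, 50)
    noisy = np.vstack([t + 0.05 * np.random.randn(50),
                       np.sin(2 * np.pi * t) + 0.05 * np.random.randn(50)])
    return close_points(noisy, s=1)   # smoothed curve, same shape as `noisy`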
def rank_by_distance_bhatt(self, qkeys, ikeys, rkeys, dists):
    """
    ::

        Reduce timbre-channel distances to ranks list by ground-truth key indices
        Bhattacharyya distance on timbre-channel probabilities and Kullback distances
    """
    # timbre-channel search using pre-computed distances
    ranks_list = []
    t_keys, t_lens = self.get_adb_lists(0)
    rdists = pylab.ones(len(t_keys)) * float('inf')
    qk = self._get_probs_tc(qkeys)
    for i in range(len(ikeys[0])):  # number of include keys
        ikey = []
        dk = pylab.zeros(self.timbre_channels)
        for t_chan in range(self.timbre_channels):  # timbre channels
            ikey.append(ikeys[t_chan][i])
            try:
                # find dist of key i for query
                i_idx = rkeys[t_chan].index(ikey[t_chan])  # dataset include-key match
                # the reduced distance function in include_keys order
                # distance is Bhattacharyya distance on probs and dists
                dk[t_chan] = dists[t_chan][i_idx]
            except ValueError:
                print("Key not found in result list: ", ikey, "for query:", qkeys[t_chan])
                raise error.BregmanError()
        rk = self._get_probs_tc(ikey)
        a_idx = t_keys.index(ikey[0])  # audiodb include-key index
        rdists[a_idx] = distance.bhatt(pylab.sqrt(pylab.absolute(dk)),
                                       pylab.sqrt(pylab.absolute(qk * rk)))
    # search for the index of the relevant keys
    rdists = pylab.absolute(rdists)
    sort_idx = pylab.argsort(rdists)  # Sort fields into database order
    for r in self.ground_truth:  # relevant keys
        ranks_list.append(pylab.where(sort_idx == r)[0][0])  # Rank of the relevant key
    return ranks_list, rdists
def rank_by_distance_avg(self, qkeys, ikeys, rkeys, dists):
    """
    ::

        Reduce timbre-channel distances to ranks list by ground-truth key indices
        Kullback distances
    """
    # timbre-channel search using pre-computed distances
    ranks_list = []
    t_keys, t_lens = self.get_adb_lists(0)
    rdists = pylab.ones(len(t_keys)) * float('inf')
    for t_chan in range(self.timbre_channels):  # timbre channels
        t_keys, t_lens = self.get_adb_lists(t_chan)
        for i, ikey in enumerate(ikeys[t_chan]):  # include keys, results
            try:
                # find dist of key i for query
                i_idx = rkeys[t_chan].index(ikey)  # lower_bounded include-key index
                a_idx = t_keys.index(ikey)  # audiodb include-key index
                # the reduced distance function in include_keys order
                # distance is the sum for now
                if t_chan:
                    rdists[a_idx] += dists[t_chan][i_idx]
                else:
                    rdists[a_idx] = dists[t_chan][i_idx]
            except ValueError:
                print("Key not found in result list: ", ikey, "for query:", qkeys[t_chan])
                raise error.BregmanError()
    # search for the index of the relevant keys
    rdists = pylab.absolute(rdists)
    sort_idx = pylab.argsort(rdists)  # Sort fields into database order
    for r in self.ground_truth:  # relevant keys
        ranks_list.append(pylab.where(sort_idx == r)[0][0])  # Rank of the relevant key
    return ranks_list, rdists
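# Hypothetical usage sketch (not part of the original class): reduce pre-computed
# per-timbre-channel search results to ranks with either reduction. `ev` is an
# evaluator instance; qkeys/ikeys/rkeys/dists are lists indexed by timbre channel,
# as produced by the evaluator's per-channel searches.
def _demo_rank_by_distance(ev, qkeys, ikeys, rkeys, dists):
    ranks_b, rdists_b = ev.rank_by_distance_bhatt(qkeys, ikeys, rkeys, dists)
    ranks_a, rdists_a = ev.rank_by_distance_avg(qkeys, ikeys, rkeys, dists)
    return ranks_b, ranks_a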
def Make_Z_Plot(grbdict, z_key='grbox_z', Nbins=31, z_cutoff=4.0):
    '''Given any dictionary that stores the redshift under one of its keys,
    plot a histogram of those redshifts.
    '''
    z_list = []
    zname_list = []
    date_list = []
    # 'un'zip all_grbs (is there a better way to do this? just assign rather than a for loop?)
    for key, value in grbdict.items():
        # date_list.append(value['date'])
        z_list.append(value[z_key])
        zname_list.append(key)

    print('***All***')
    print(' ')
    print(z_list)
    print(len(z_list))

    ### Print out the most distant GRB as a function of time
    zmax = 0.0
    rr = []
    for key, value in grbdict.items():
        if value[z_key] > zmax:
            rr.append(key)
            zmax = value[z_key]
            # print value['date'].year + value['date'].timetuple().tm_yday/365.0, zmax, "# ", key

    ax = plt.subplot(111)
    n, bins, patches = plt.hist(plt.log10(z_list),
                                bins=Nbins,
                                facecolor='grey',
                                edgecolor='grey')

    # Define pre-Swift burst index as bursts before 041210
    # high_z_i = plt.where(plt.array(date_list) < datetime.date(2004,12,10))
    # high_z_list = [z_list[i] for i in list(high_z_i[0])]
    # print high_z_list
    # n, bins1, patches = plt.hist(plt.log10(high_z_list),bins=bins,facecolor='black',edgecolor='black',alpha=0.6)

    # high_z_list is needed below for the cumulative curve even when the overplot is off
    high_z_list = [z for z in z_list if z > z_cutoff]
    if overplot_high_z:
        n, bins1, patches = plt.hist(plt.log10(high_z_list),
                                     bins=bins,
                                     facecolor='black',
                                     edgecolor='black')

    ay = ax.twinx()

    argg = list(plt.ones(len(z_list)).cumsum().repeat(2))
    zz = copy.copy(z_list)
    zz.sort()
    tmp = list(plt.log10(zz).repeat(2))
    tmp.append(1)
    yy = [0]
    yy.extend(argg)
    ay.plot(tmp, yy, aa=True, linewidth=4, color='black')

    argg = list(plt.ones(len(high_z_list)).cumsum().repeat(2))
    zz = copy.copy(high_z_list)
    zz.sort()
    tmp = list(plt.log10(zz).repeat(2))
    tmp.append(1)
    yy = [0]
    yy.extend(argg)
    ay.plot(tmp, yy, aa=True, linewidth=2, color='grey')

    ay.set_ylim((0, len(z_list) * 1.05))
    ay.set_ylabel("Cumulative Number", fontsize=20)

    # formatter for bottom x axis
    def ff(x, pos=None):
        if x < -1:
            return "%.2f" % (10**x)
        elif x < 0:
            return "%.1f" % (10**x)
        elif 10**x == 8.5:
            return "%.1f" % (10**x)
        else:
            return "%i" % (10**x)

    formatter = FuncFormatter(ff)

    ax.set_xticks([
        -2, -1,
        plt.log10(0.3), 0,
        plt.log10(2),
        plt.log10(3),
        plt.log10(4),
        plt.log10(6),
        plt.log10(8.5)
    ])
    ax.xaxis.set_major_formatter(formatter)
    ax.set_xlabel("Redshift ($z$)", fontsize=20)
    ax.set_ylabel("Number", fontsize=20)
    ax.set_xlim((plt.log10(0.005), plt.log10(10)))

    ax2 = ax.twiny()
    xlim = ax.get_xlim()
    # ax2.set_xscale("log")
    ax2.set_xlim((xlim[0], xlim[1]))

    # Define function for plotting the top x axis: time since the Big Bang in Gyr
    def rr(x, pos=None):
        g = cosmocalc.cosmocalc(10.0**x, H0=71.0)
        if g['zage_Gyr'] < 1:
            return "%.2f" % g['zage_Gyr']  # two decimal places if age < 1, e.g. 0.62
        else:
            return "%.1f" % g['zage_Gyr']  # one decimal place if age >= 1, e.g. 1.5

    ax2.set_xticks(
        [-1.91, -1.3, -0.752, -0.283, 0.102, 0.349, 0.62, plt.log10(8.3)])
    formatter1 = FuncFormatter(rr)
    ax2.xaxis.set_major_formatter(formatter1)
    ax2.set_xlabel("Time since Big Bang (Gyr)", fontsize=20)
    # plt.bar(l,a['yy'],width=w,log=False)
    # ax.set_xscale("log",nonposx='clip')

    ## Now plot inset plot of GRBs greater than z=z_cutoff
    axins = inset_axes(ax2,
                       width="30%",   # width = 30% of parent_bbox
                       height="30%")  # height = 30% of parent_bbox
    locator = axins.get_axes_locator()
    locator.set_bbox_to_anchor((-0.8, -0.45, 1.35, 1.35), ax.transAxes)
    locator.borderpad = 0.0

    high_z_list = [z for z in z_list if z > z_cutoff]
    if high_z_list:  # if there are any high-z's, plot them
        n, bins, patches = plt.hist(plt.array(high_z_list),
                                    facecolor='black',
                                    edgecolor='black')
        axins.set_xlim(z_cutoff, 8.5)
        axins.set_xlabel("z")
        axins.set_ylabel("N")
        # high_z_i = plt.where(plt.array(date_list) < datetime.date(2004,12,10))
        # high_z_list = [z_list[i] for i in list(high_z_i[0]) if z_list[i] > z_cutoff]
        n, bins, patches = plt.hist(plt.array(high_z_list),
                                    bins=bins,
                                    facecolor='black',
                                    edgecolor='black')

    high_z_list = [z for z in z_list if z > z_cutoff]
    if high_z_list:  # if there are any high-z's, plot them
        n, bins, patches = plt.hist(plt.array(high_z_list),
                                    facecolor='black',
                                    edgecolor='black')
        axins.set_xlim(z_cutoff, 9.0)
    # mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
    # axins2.set_xlabel("Time since Big Bang [Gyr]",fontsize=20)

    ylabels = ax.get_yticklabels()
    plt.setp(ylabels, size=14, name='times', weight='light', color='k')
    xlabels = ax.get_xticklabels()
    plt.setp(xlabels, size=14, name='times', weight='light', color='k')
    xlabels = ax2.get_xticklabels()
    plt.setp(xlabels, size=14, name='times', weight='light', color='k')
    xlabels = ay.get_yticklabels()
    plt.setp(xlabels, size=14, name='times', weight='light', color='k')

    plt.savefig('z_plot.eps')
    plt.draw()
    return z_list
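# Hypothetical usage sketch (not part of the original module): histogram the redshifts
# of a small, illustrative GRB dictionary. Assumes the module-level flag
# `overplot_high_z` and the plotting imports used above are available.
def _demo_make_z_plot():
    grbs = {'GRB 090423': {'grbox_z': 8.2},
            'GRB 050509B': {'grbox_z': 0.225},
            'GRB 080913': {'grbox_z': 6.7}}
    return Make_Z_Plot(grbs, z_key='grbox_z', Nbins=10, z_cutoff=4.0)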
def getFakeData(self):
    # Synthetic test signal: first 200 samples are zero, the remainder are 800.0
    fakeData = 800 * pylab.ones((ARRAY_SZ, ))
    fakeData[:200] = 0.0
    return fakeData
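# Hypothetical standalone sketch (not part of the original class) of the same fake-data
# layout; ARRAY_SZ is a module-level constant in the original, and 1024 here is
# illustrative only.
def _demo_fake_data(array_sz=1024):
    data = 800 * pylab.ones((array_sz, ))
    data[:200] = 0.0                # first 200 samples silenced
    return data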