def main():
    import argparse
    parser = argparse.ArgumentParser(
        description="""Denoise measured field strength (H) curves.""",
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # -------------------------------------------------
    # ungrouped meta options

    parser.add_argument('-v', '--version', action='version',
                        version=('%(prog)s ' + __version__))
    parser.add_argument('-l', '--list', dest='listfiles', action='store_true',
                        help='List available stress levels (measurement data files) and exit.')
    parser.set_defaults(listfiles=False)

    # -------------------------------------------------
    # data options

    group_data = parser.add_argument_group('data', 'Data file options.')
    group_data.add_argument('-s', '--sigma', dest='sigma', default=0, type=int,
                            metavar='x',
                            help='Stress level, selects the corresponding data file. '
                                 '(This expects only the number in MPa, leaving out the unit.) '
                                 '(default: %(default)s).')
    group_data.add_argument('-p', '--path', dest='path', default=".", type=str,
                            metavar='my/directory/path',
                            help='Path where to look for measurement data files '
                                 '(default: current working directory).')

    # -------------------------------------------------

    # http://parezcoydigo.wordpress.com/2012/08/04/from-argparse-to-dictionary-in-python-2-7/
    kwargs = vars(parser.parse_args())

    if kwargs["listfiles"]:
        util.listfiles(kwargs["path"])
        sys.exit(0)
    else:
        lam = scrub(kwargs["sigma"], kwargs["path"], show=True, verbose=True)
def main(path):
    # get data files in specified directory
    #
    data_items = util.listfiles(path, verbose=False)

    for sigma, input_filename in data_items:
        output_filename = re.sub(r'\.mat$', r'_denoised.mat', input_filename)
        print("Scrubbing '%s' --> '%s'..." % (input_filename, output_filename))

        H = filter_H.scrub(sigma, path)
        B = filter_B.scrub(sigma, path)
        pol = filter_pol.scrub(sigma, path)
        lam = filter_magnetostriction.scrub(sigma, path)
        assert H.shape == B.shape == pol.shape == lam.shape

        A = np.empty((H.shape[0], 4), dtype=np.float64)
        A[:, 0] = H[:]
        A[:, 1] = B[:]
        A[:, 2] = pol[:]
        A[:, 3] = lam[:]
        scipy.io.savemat(output_filename, mdict={'A': A})

    print("All done.")
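# NOTE: util.listfiles is called throughout the measurement scripts above but is
# not shown in these snippets. The sketch below is only a guess at its shape,
# consistent with how it is used here: it returns (sigma, filename) pairs for the
# .mat files in a directory and can optionally print them (as the --list option
# expects). The naming convention (leading integer = stress level in MPa) is an
# assumption for illustration, not the project's actual convention.
import os
import re

def listfiles(path, verbose=True):
    """Return (sigma, filename) pairs for the .mat data files in `path`."""
    items = []
    for name in sorted(os.listdir(path)):
        m = re.match(r'^(\d+)', name)  # hypothetical "<sigma>....mat" naming
        if name.endswith('.mat') and m is not None:
            items.append((int(m.group(1)), os.path.join(path, name)))
    if verbose:
        for sigma, filename in items:
            print("%4d MPa: %s" % (sigma, filename))
    return items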
def runthreading():
    pool = ThreadPoolExecutor(1)  # one worker thread: jobs run one at a time, off the calling thread
    jpgname = listfiles(path, "jpg")
    for item in jpgname:
        # don't re-recognize images that have already been recognized
        if len(item) > 30:
            pool.submit(main, item)
def filltree(self, path):
    self.currentfiles.clear()
    lst = os.listdir(self.dir)
    files = util.listfiles(path)
    dirs = util.listdirs(path)
    counter = 0
    for p in dirs:
        self.currentfiles.append([p + "/"])
        counter += 1
    for p in files:
        self.currentfiles.append([p])
        counter += 1
    self.len = len(lst)
def main(path):
    # get list of raw data files in specified directory
    #
    data_items = util.listfiles(path, verbose=False)

    for sigma,input_filename in data_items:
#    for dummy,input_filename in data_items:
        if sigma != 0:  # XXX DEBUG
            continue

        input_filename = re.sub( r'\.mat$', r'_denoised.mat', input_filename )  # denoised data files
        output_filename = re.sub( r'\.mat$', r'_singlevalued.mat', input_filename )
        print( "Single-valuizing '%s' --> '%s'..." % (input_filename, output_filename) )

        try:
            data = scipy.io.loadmat(input_filename)
        except FileNotFoundError:
            import sys
            print( "Data file named '%s' not found, exiting (use --list to see available data files)" % (input_filename), file=sys.stderr )
            sys.exit(1)

        A = data["A"]
        H = A[:,0]    # Field strength H (A/m)
        B = A[:,1]    # Flux density B (T)
        pol = A[:,2]  # magnetic polarization J = B - mu0*H
        lam = A[:,3]  # Magnetostriction lambda (ppm)
        assert H.shape == B.shape == pol.shape == lam.shape

#        # de-hysterize
#        xx,yy = fit_1d_weighted_average_localr(B,H)
#
#        # symmetrize w.r.t. B = 0
#        #
#        # we average the positive and negative parts.
#        #
#        imid = yy.shape[0]//2
#        ymid = yy[imid]
##        tmp = 0.5 * ((ymid - yy[imid-1::-1]) + (yy[imid+1:] - ymid))
#        tmp = 0.5 * (yy[imid+1:] - yy[imid-1::-1])  # equivalent
#        yy2 = np.empty_like(yy)
#        yy2[imid-1::-1] = ymid - tmp
#        yy2[imid+1:] = ymid + tmp
#        yy2[imid] = ymid
#
#        # DEBUG TEST - swap pos/neg parts
#        tmp_p = yy[imid+1:] - ymid
#        tmp_n = ymid - yy[imid-1::-1]
#        yy3 = np.empty_like(yy)
#        yy3[imid-1::-1] = ymid - tmp_p
#        yy3[imid+1:] = ymid + tmp_n
#        yy3[imid] = ymid

#        xx,yy = fit_1d_doeverything(B,H)
#        xx,yy = take_positive_half(xx, yy)

        xout = []
        yout = []
        xdata = [B, pol, H]
        ydata = [H, H, lam]
        xlabels = [r"$B$", r"$pol$", r"$H$"]
        ylabels = [r"$H$", r"$H$", r"$\lambda$"]
        deoffsetters = [_deoffset_rawdata, _deoffset_rawdata, _deoffset_lam_data]
        fs = [fit_1d_doeverything, fit_1d_doeverything, fit_1d_doeverything_for_lam]
        for xx,yy,f in zip(xdata,ydata,fs):
            xx2,yy2 = f(xx,yy)
            xx2,yy2 = take_positive_half(xx2,yy2)
            xout.append(xx2)
            yout.append(yy2)

#        plt.figure(1, figsize=(9,6))
#        plt.clf()
#        x,y = _deoffset_rawdata(B,H)
#        plt.plot(x, y, color='#d0d0d0', linestyle='solid')
#        plt.plot(xx, yy, color='#909090', linestyle='solid')
#        plt.plot(xx, yy3, color='#909090', linestyle='dashed')  # DEBUG: pos/neg parts swapped
#        plt.plot(xx, yy2, color='k', linestyle='solid')
#        plt.xlabel(r"$B$")
#        plt.ylabel(r"$H$")

        plt.figure(1, figsize=(14,6))
        plt.clf()
        nplots = len(ydata)
        for i,deof,xx_raw,xx_filt,xlabel,yy_raw,yy_filt,ylabel in \
          zip(range(nplots), deoffsetters, xdata, xout, xlabels, ydata, yout, ylabels):
            ax = plt.subplot(1,nplots, i+1)
            x,y = deof(xx_raw,yy_raw)
            ax.plot(x, y, color='#d0d0d0', linestyle='solid')
            ax.plot(xx_filt, yy_filt, color='#909090', linestyle='solid')
            ax.axis( [np.min(xx_filt), np.max(xx_filt), np.min(yy_filt), np.max(yy_filt)] )
            axis_marginize(ax, 0.02, 0.02)
            ax.grid(b=True, which='both')
            ax.set_title(r"%s(%s)" % (ylabel, xlabel))
            break

        # We have no guarantees that x starts from 0. Fix this.
        #
        tol = 1e-8
        if xout[0][0] < tol:
            xout[0][0] = 0
        if xout[1][0] < tol:
            xout[1][0] = 0
        # HACK: the lambda curve is fitted with the axes swapped.
        if yout[2][0] < tol:
            yout[2][0] = 0
        else:
            raise ValueError("something went wrong, lambda curve does not start from zero (got %g)" % (yout[2][0]))

        # Clip data to the smallest common max(H)
        #
        xs = [xout[0], xout[1], yout[2]]
        ys = [yout[0], yout[1], xout[2]]
        fs = []
        max_minx = -np.inf
        min_maxx = +np.inf
        for x,y in zip(xs,ys):
            min_maxx = min(np.max(x), min_maxx)
            max_minx = max(np.min(x), max_minx)
            fs.append( scipy.interpolate.interp1d(x,y) )

        # Interpolate to a common grid on the H axis
        #
        xx = np.linspace(0, min_maxx, 10001)
        yout2 = []
        for f in fs:
            yout2.append( f(xx) )

        # Save.
        #
        A = np.empty( (xx.shape[0],4), dtype=np.float64 )
        A[:,0] = xx
        A[:,1] = yout2[0]
        A[:,2] = yout2[1]
        A[:,3] = yout2[2]
        scipy.io.savemat( output_filename, mdict={ 'A' : A } )

    print( "All done." )
#!/usr/bin/python3.4
# -*- coding: utf-8 -*-
# This script cuts the captcha images into training samples.
from PIL import Image
import time
import random
import os
from util import listfiles

if __name__ == '__main__':
    path = "../jpg/img/"
    os.mkdir("../jpg/letter")
    jpgname = listfiles(path, "jpg")
    for item in jpgname:
        try:
            jpgpath = item
            im = Image.open(jpgpath)
            # JPEG is not the lowest colour depth, GIF is, so convert the pixel mode
            im = im.convert("P")
            # build the pixel histogram
            his = im.histogram()
            values = {}
            for i in range(0, 256):
                values[i] = his[i]
            # sort: key=lambda x: x[1] sorts by the second field, lambda x: x[0] by the first
if __name__ == '__main__':
    numset = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
    symbol_set = []
    iconset = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
               'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
    imageset = []
    for letter in iconset:
        for img in os.listdir('../iconset1/%s/' % (letter)):
            temp = []
            if img != "Thumbs.db" and img != ".DS_Store":
                temp.append(buildvector(Image.open("../iconset1/%s/%s" % (letter, img))))
            imageset.append({letter: temp})

    path = "../jpg/letter/"
    jpgname = listfiles(path, "gif")  # ../jpg/letter/20161210145303813.gif
    for item in jpgname:
        print(item)
        try:
            # load the training set comparator
            v = VectorCompare()
            guess = []
            # open the file this way so that it gets closed afterwards, otherwise:
            # [WinError 32] The process cannot access the file because it is being
            # used by another process: '../jpg/letter/201612101452081010.gif'
            im3 = Image.open(item)
            # compare each cropped captcha fragment against every training sample
            for image in imageset:
                for x, y in image.items():
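# NOTE: the (truncated) recognition snippet above relies on buildvector and
# VectorCompare, which are not defined in these excerpts. The sketch below is a
# minimal, assumed formulation in the style of the common captcha-decoding
# approach this code follows: each glyph image is flattened into a dict of pixel
# values and candidate letters are ranked by cosine similarity. The project's
# actual helpers may differ in detail.
import math

class VectorCompare:
    def magnitude(self, concordance):
        # Euclidean length of the pixel-value vector
        total = 0
        for word, count in concordance.items():
            total += count ** 2
        return math.sqrt(total)

    def relation(self, concordance1, concordance2):
        # cosine similarity between two pixel-value vectors
        topvalue = 0
        for word, count in concordance1.items():
            if word in concordance2:
                topvalue += count * concordance2[word]
        return topvalue / (self.magnitude(concordance1) * self.magnitude(concordance2))

def buildvector(im):
    # flatten a PIL image into {pixel_index: pixel_value}
    d1 = {}
    count = 0
    for i in im.getdata():
        d1[count] = i
        count += 1
    return d1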