def generate(surf, num_beams, num_rays, out_filename):
    """Generate a fan-beam ray-integral dataset for *surf* and write it to disk.

    Integrates ``surf`` along straight rays between pivot points on the
    RMIN arc and destination points on the RMAX arc, writing one line per
    ray in the form ``ax ay bx by integral``.

    Parameters
    ----------
    surf : callable(x, y) -> float
        Scalar field to integrate; applied pointwise via np.vectorize.
    num_beams : int
        Number of pivot positions on the RMIN arc.
    num_rays : int
        Number of destination positions on the RMAX arc per beam.
    out_filename : str
        Output path; missing parent directories are created.
    """
    t = np.linspace(0., 1., NSTEPS)
    filedir = path.dirname(out_filename)
    # dirname() is '' for a bare filename; makedirs('') would raise
    # FileNotFoundError, so only create a directory when there is one.
    if filedir and not path.exists(filedir):
        makedirs(filedir)
    # Skip regeneration when the dataset is newer than this script.
    if path.exists(out_filename) and path.getmtime(out_filename) > path.getmtime(__file__):
        print("Dataset is up to date, skipping.")
        return
    with open(out_filename, 'w') as out_file:
        out_file.write('diff ' + str(num_beams * num_rays) + '\n')
        rays = 0
        for pivotPhi in np.linspace(-PHIRANGE / 2, PHIRANGE / 2, num_beams):
            ax, ay = from_polar(RMIN, pivotPhi)
            for dstPhi in np.linspace(-PHIRANGE / 2, PHIRANGE / 2, num_rays):
                rays += 1
                update_progress(rays * 100 / num_beams / num_rays)
                bx, by = from_polar(RMAX, dstPhi)
                # Parametrize the ray: x=0 at (bx, by), x=1 at (ax, ay).
                func = np.vectorize(lambda x: surf(x * ax + (1 - x) * bx,
                                                   x * ay + (1 - x) * by))
                # Line integral: trapezoid rule scaled by the ray length.
                result = np.trapz(func(t), t) * sqrt((ax - bx)**2 + (ay - by)**2)
                out_file.write(str(ax) + ' ' + str(ay) + ' ')
                out_file.write(str(bx) + ' ' + str(by) + ' ')
                out_file.write(str(result) + '\n')
def main():
    """Reconstruct every file under ./data with each solver/config combination.

    Fans the reconstruction jobs out to a multiprocessing pool; outputs
    that already exist are skipped. Progress is reported via
    update_progress as callbacks fire.
    """
    datafiles = [
        path.join('./data', f)
        for f in os.listdir('./data')
        if path.isfile(path.join('./data', f))
    ]
    pool = Pool()
    completedTasks = 0
    totalTasks = 0

    def mp_callback(task):
        # Runs in the parent process when a worker finishes one job.
        nonlocal totalTasks, completedTasks
        completedTasks += 1
        update_progress(completedTasks / totalTasks * 100)

    job_start = time.time()
    print('Generating jobs...')
    for datafile in datafiles:
        prefix = 'out/' + path.splitext(path.basename(datafile))[0] + '/'
        if not path.exists(prefix):
            os.makedirs(prefix)
        for solverType in ['ART', 'SIRT']:
            solver_flag = ['--solver=' + solverType]
            for name, cfg in config.items():
                outname = prefix + solverType + '-' + name + '.png'
                if path.exists(outname):
                    continue  # output already present; don't redo the work
                totalTasks += 1
                pool.apply_async(reconstruct,
                                 args=(datafile, solver_flag + cfg, outname),
                                 callback=mp_callback)
    print('Generated ' + str(totalTasks) + ' jobs.')
    update_progress(0)
    pool.close()
    pool.join()
    if totalTasks == 0:
        update_progress(100)
    job_end = time.time()
    # Fixed format spec: '1f' was a field-width of 1, not one decimal place.
    print('Took {0:.1f}s'.format(job_end - job_start))
def main():
    """Queue one reconstruction job per (datafile, solver, config) triple."""
    data_dir = './data'
    inputs = []
    for entry in os.listdir(data_dir):
        candidate = path.join(data_dir, entry)
        if path.isfile(candidate):
            inputs.append(candidate)

    worker_pool = Pool()
    done_count = 0
    queued_count = 0

    def on_done(task):
        # Fires in the parent process as each queued job completes.
        nonlocal queued_count, done_count
        done_count += 1
        update_progress(done_count / queued_count * 100)

    started_at = time.time()
    print('Generating jobs...')
    for input_file in inputs:
        out_prefix = 'out/' + path.splitext(path.basename(input_file))[0] + '/'
        if not path.exists(out_prefix):
            os.makedirs(out_prefix)
        for solver in ('ART', 'SIRT'):
            solver_arg = ['--solver=' + solver]
            for cfg_name, cfg_flags in config.items():
                target = out_prefix + solver + '-' + cfg_name + '.png'
                if path.exists(target):
                    continue
                queued_count += 1
                worker_pool.apply_async(
                    reconstruct,
                    args=(input_file, solver_arg + cfg_flags, target),
                    callback=on_done)
    print('Generated ' + str(queued_count) + ' jobs.')
    update_progress(0)
    worker_pool.close()
    worker_pool.join()
    if queued_count == 0:
        update_progress(100)
    finished_at = time.time()
    print('Took {0:1f}s'.format(finished_at - started_at))
def generate(surf, num_beams, num_rays, out_filename):
    """Write a ray-integral dataset for *surf* to *out_filename*.

    Every output line records one ray's endpoints and the integral of
    the surface along it: ``ax ay bx by value``.
    """
    samples = np.linspace(0., 1., NSTEPS)
    target_dir = path.dirname(out_filename)
    if not path.exists(target_dir):
        makedirs(target_dir)
    # Regenerate only when this script is newer than the existing file.
    up_to_date = (path.exists(out_filename)
                  and path.getmtime(out_filename) > path.getmtime(__file__))
    if up_to_date:
        print("Dataset is up to date, skipping.")
        return
    pivot_angles = np.linspace(-PHIRANGE / 2, PHIRANGE / 2, num_beams)
    dest_angles = np.linspace(-PHIRANGE / 2, PHIRANGE / 2, num_rays)
    with open(out_filename, 'w') as fh:
        fh.write('diff ' + str(num_beams * num_rays) + '\n')
        done = 0
        for pivot_phi in pivot_angles:
            ax, ay = from_polar(RMIN, pivot_phi)
            for dst_phi in dest_angles:
                done += 1
                update_progress(done * 100 / num_beams / num_rays)
                bx, by = from_polar(RMAX, dst_phi)
                # Sample the surface along the straight segment between
                # the two endpoints, parametrized by s in [0, 1].
                along_ray = np.vectorize(
                    lambda s: surf(s * ax + (1 - s) * bx,
                                   s * ay + (1 - s) * by))
                ray_length = sqrt((ax - bx) ** 2 + (ay - by) ** 2)
                value = np.trapz(along_ray(samples), samples) * ray_length
                fh.write(str(ax) + ' ' + str(ay) + ' ')
                fh.write(str(bx) + ' ' + str(by) + ' ')
                fh.write(str(value) + '\n')
# NOTE(review): fragment of a closure — `nonlocal` requires an enclosing
# function that binds totalTasks/completedTasks, so this is not valid
# Python at module level on its own.
def mp_callback(task):
    # Invoked once per completed pool task; `task` carries the worker's
    # result and is unused here.
    nonlocal totalTasks, completedTasks
    completedTasks += 1
    # Percentage of jobs queued so far; totalTasks may still be growing
    # while early jobs complete — presumably accepted as approximate.
    update_progress(completedTasks / totalTasks * 100)