# --- Dataset discovery, data provider, and model hyper-parameters ---
# (script-level setup; runs at import time)
sys.path.insert(0, "./")

import csgan as cs

# Locate the dataset directory relative to the current working directory.
if os.path.exists('../dataset/'):
    prefix = '../dataset/'
elif os.path.exists('../../dataset/'):
    prefix = '../../dataset/'
else:
    # FileNotFoundError is more precise than a bare Exception and remains
    # backward-compatible: any existing `except Exception` handler still
    # catches it.
    raise FileNotFoundError('Dataset not found!')

dataset_files = [
    'map1n_allz_rtaapixlw_2048_1.fits',
    'map1n_allz_rtaapixlw_2048_2.fits',
    'map1n_allz_rtaapixlw_2048_3.fits',
]

dp = cs.Data_Provider(
    [prefix + file_name for file_name in dataset_files],
    preprocess_mode=2,
)

# Training hyper-parameters.
batch_size = 16
image_size = 128
gf_dim = 64   # presumably generator feature-map base width — TODO confirm
df_dim = 64   # presumably discriminator feature-map base width — TODO confirm
z_dim = 128   # latent-vector dimensionality


def dpp(n):
    """Draw `n` square patches from the provider as an (n, H, W, 1) array.

    Relies on `dp(n, image_size)` returning n*image_size*image_size values
    that reshape cleanly into single-channel images.
    """
    return dp(n, image_size).reshape(n, image_size, image_size, 1)


##################################################################

from csgan.ops import lrelu, conv2d, linear
import tensorflow as tf
# NOTE(review): this chunk starts mid-function — the enclosing `def` is
# outside the visible range. `scharrx`/`scharry` are presumably Scharr
# gradient components and `d` their magnitude — TODO confirm against the
# full definition.
    d = np.sqrt(scharrx**2 + scharry**2)
    return d


def filt_all(maps, func):
    # Apply `func` to every 2-D map and stack original + filtered along a
    # new trailing axis (axis=3), i.e. a two-channel result.
    out1 = []
    for m in maps:
        out1.append(func(m))
    return np.stack([maps, np.array(out1)], axis=3)


def func(dt):
    # Edge-filter wrapper used as the second channel; `canny` is defined
    # elsewhere in this file (signature: data, sigma?, mode?, kernel?) —
    # TODO confirm argument meanings.
    return canny(dt, 0, 'none', 'sch')


dp = cs.Data_Provider('../../../strings/map1n_allz_rtaapixlw_4096_1b')
ims = 512  # working image size for this script

import tensorflow as tf


def conv2d(x, W):
    # Stride-1, SAME-padded 2-D convolution (TF1 API).
    return tf.nn.conv2d(input=x, filter=W, strides=[1, 1, 1, 1], padding='SAME')


def avg_pool_2x2(x):
    # 2x2 average pooling with stride 2: halves both spatial dimensions.
    return tf.nn.avg_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')


def discriminator(x_image, reuse=False, n_channel=2):
    # NOTE(review): this definition runs past the end of the chunk and is
    # truncated here — only the scope/reuse preamble is visible.
    with tf.variable_scope('discriminator') as scope:
        if (reuse):
            tf.get_variable_scope().reuse_variables()
        #First Conv and Pool Layers
# NOTE(review): this chunk starts mid-function — these lines are the tail
# of a `filt_all(maps, func)`-style helper whose `def` header is outside
# the visible range.
    out1 = []
    for m in maps:
        out1.append(func(m))
    return np.stack([maps, np.array(out1)], axis=3)


def func(dt):
    # Edge-filter wrapper (`canny` is defined elsewhere in this file).
    return canny(dt, 0, 'none', 'sch')


# Dataset paths: maps 1..3 (range(1, 4)).
file_list = [
    '../../dataset/map1n_allz_rtaapixlw_2048_' + str(i) + '.fits'
    for i in range(1, 4)
]
dp = cs.Data_Provider(file_list, preprocess_mode=2)

#dt = filt_all(dp(10,128),func)
#dt.shape
#fig,(ax1,ax2)=plt.subplots(1,2,figsize=(8,18))
#ax1.imshow(dt[0,:,:,0])
#ax2.imshow(dt[0,:,:,1])

batch_size = 64
image_size = 256
# Run directories derived from the script's own name; `[:-3]` strips the
# trailing '.py' from sys.argv[0].
checkpoint_dir = './checkpoint/' + sys.argv[0][:-3]
sample_dir = './samples/' + sys.argv[0][:-3]


def dpp(n):
    # NOTE(review): the body visible here is commented out and the
    # definition appears to continue past this chunk — truncated.
    # return dp(n,image_size).reshape(n,image_size,image_size,1)
# NOTE(review): this chunk starts mid-function — the tail of a
# `filt_all(maps, func)`-style helper whose `def` header is outside the
# visible range.
    out1 = []
    for m in maps:
        out1.append(func(m))
    return np.stack([maps, np.array(out1)], axis=3)


def func(dt):
    # Edge-filter wrapper (`canny` is defined elsewhere in this file).
    return canny(dt, 0, 'none', 'sch')


# Only the first map file is used here (range(1, 2) yields [1]).
file_list = [
    '../../dataset/map1n_allz_rtaapixlw_2048_' + str(i) + '.fits'
    for i in range(1, 2)
]
# No preprocess_mode passed here, unlike sibling scripts — presumably the
# provider's default is intended; TODO confirm.
dp = cs.Data_Provider(file_list)

#dt = filt_all(dp(10,128),func)
#dt.shape
#fig,(ax1,ax2)=plt.subplots(1,2,figsize=(8,18))
#ax1.imshow(dt[0,:,:,0])
#ax2.imshow(dt[0,:,:,1])

batch_size = 512
image_size = 128
gf_dim = 8    # presumably generator feature-map base width — TODO confirm
z_dim = 4096  # sized so that (image_size/16)**2 * gf_dim*8 == z_dim (see below)
#(image_size/16)**2*gf_dim*8==z_dim