Code example #1
File: american.py  Project: hodleth/graveyard
from numpy import random, cumprod, sqrt, mean

def american(n, T, sigma, s_nil, K):
    # number of trading days to maturity
    dt = int(T * 252)
    u = random.randn(dt) * sigma / sqrt(dt)   # one path of daily returns (unused below)
    # n geometric random walks of dt daily returns, scaled by the spot s_nil
    z = cumprod(1 + random.randn(n, dt) * sigma / sqrt(dt), 1) * s_nil
    # call payoff at maturity against the strike K (the original hard-coded 100 here)
    payoffs = (z[:, -1] - K) * ((z[:, -1] - K) > 0)
    price = mean(payoffs)
    print(payoffs, price)
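A quick usage sketch for the function above; the argument values (10,000 paths, one year to maturity, 20% volatility, spot and strike of 100) are illustrative assumptions, not values from the original project:

american(10000, 1.0, 0.2, 100.0, 100.0)  # prints the simulated payoffs and the Monte Carlo price estimate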
Code example #2
File: NN.py  Project: Blaver/MyDBN
 def __init__(self, layers, l_rate, mu = 0, sigma = 0.01):
     self.depth = len(layers)
     self.l_rate = l_rate
     
     self.layers  = [ mat([0 for items in range(layers[row])], dtype = float) for row in range(self.depth)]
     self.bias    = [ mat([sigma*random.randn() for items in range(layers[row])], dtype = float) for row in range(self.depth)]
     self.b_grads = [ mat([0 for items in range(layers[row])], dtype = float) for row in range(self.depth)]
     self.weights = [ mat([ [sigma*random.randn() for z in range(layers[x+1])] for y in range(layers[x])], dtype = float) for x in range(self.depth-1)]
     self.w_grads = [ mat([ [0 for z in range(layers[x])] for y in range(layers[x+1])], dtype = float) for x in range(self.depth-1)]
     self.label = 0
Code example #3
def serie(size, freq, factor=None, phase=None):
    # ts is assumed to be a module-level array of sample times;
    # np, fft and random here refer to numpy, numpy.fft and numpy.random
    noise = .01 * random.randn(ts.shape[0])
    if freq is None: return np.abs(fft.fft(noise, size))
    if factor is None: factor = 1 + .02 * (random.rand() - .5)
    if phase is None: phase = 2j * np.pi * random.rand()
    freq_ = factor * freq
    ys = np.exp(2j * np.pi * freq_ * ts - phase) + noise
    return np.abs(fft.fft(ys, size))
Code example #4
File: __init__.py  Project: chronosdb/pychronos
def makePeriodSeries(nper=10, name=None, freq="B", dtype="int64", start=None):
    if dtype == "int64":
        data = [random.randint(0, 100) for i in range(nper)]
    elif dtype == "float64":
        data = random.randn(nper)
    elif dtype == 'bool':
        data = random.randn(nper) < 0
    elif dtype == 'categorical':
        cat = {0: "zero", 1: "one", 2: "two"}
        # draw nper codes (the original used a fixed range(10)) and map them to labels;
        # numpy's randint(0, 3) returns values in {0, 1, 2}
        data = pd.Categorical([cat[random.randint(0, 3)] for i in range(nper)],
                              categories=list(cat.values()),
                              ordered=True)
    else:
        raise ValueError("unsupported dtype, {}".format(dtype))

    return pd.Series(data,
                     index=makePeriodIndex(nper, freq=freq, start=start),
                     name=name)
Code example #5
File: LR_MPI.py  Project: Blaver/MyDBN
    def __init__(self, input_size, output_size, l_rate, myrank = 0, mu = 0, sigma = 0.01):
        #inputs and outputs
        self.inputs = mat([0 for item in range(input_size)], dtype=float)
        self.outs = mat([0 for item in range(output_size)], dtype=float)

        #parameters
        self.bias = mat([0 for item in range(output_size)], dtype=float)
        self.weights = mat([[0 for col in range(output_size)] for row in range(input_size)], dtype=float)

        #gradients of parameters
        self.b_grads = mat([0 for item in range(output_size)], dtype=float)
        self.w_grads = mat([[0 for col in range(output_size)] for row in range(input_size)], dtype=float)

        #records of vector sizes
        self.int_sz = input_size
        self.out_sz = output_size

        #record label of current input
        self.label = 0

        #record testing data
        self.accepted = 0
        self.count = 0
        self.record = list(range(10))  # list so individual entries can be overwritten
        
        #learning rate
        self.l_rate = l_rate

        #parameters used to initialize biases and weights
        self.mu = mu
        self.sigma = sigma

        '''----MPI----'''
        #MPI rank
        self.rank = myrank
        '''----MPI----'''
        
        #initialize biases and weights from N(mu, sigma^2); the original ignored mu
        for i in range(output_size):
            self.bias[0, i] = mu + sigma*random.randn()

        for i in range(0, input_size):
            for j in range(0, output_size):
                self.weights[i, j] = mu + sigma*random.randn()
Code example #6
def reset_network(n1=6, n2=7, random=np.random):
    global W1, W2, W3, b1, b2, b3
    W1 = random.randn(n1, 1) / 2
    W2 = random.randn(n2, n1) / 2
    W3 = random.randn(2, n2) / 2
    b1 = random.randn(n1, 1) / 2
    b2 = random.randn(n2, 1) / 2
    b3 = random.randn(2, 1) / 2
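The shapes above define a fully connected 1 → n1 → n2 → 2 network. Below is a minimal forward pass consistent with those shapes; the sigmoid activation and the forward() name are illustrative assumptions, since the snippet itself only initializes the parameters:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def forward(x):
    # x has shape (1, batch); W1..b3 are the globals set by reset_network()
    a1 = sigmoid(W1 @ x + b1)    # (n1, batch)
    a2 = sigmoid(W2 @ a1 + b2)   # (n2, batch)
    return W3 @ a2 + b3          # (2, batch)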
Code example #7
from numpy import random, cumprod, sqrt, mean
import matplotlib.pyplot as plt

def monte_carlo_simulator(num, ttm, vol, price, strike):
    # number of trading days to maturity
    delta = int(ttm * 252)
    data_points = random.randn(delta) * vol / sqrt(delta)
    plt.hist(data_points)
    plt.title("Histogram")
    plt.xlabel("Value")
    plt.ylabel("Frequency")
    plt.show()

    # num geometric random walks of daily returns, scaled by the starting price
    random_walk = cumprod(1 + random.randn(num, delta) * vol / sqrt(delta), 1) * price
    plt.title("Geometric Random Walk")
    plt.xlabel("Time")
    plt.ylabel("Stock Price")
    for point in random_walk:
        plt.plot(point)
    plt.show()

    plt.hist(random_walk[:, -1], 40)
    plt.show()

    # call payoff at maturity against the strike (the original hard-coded 100 here)
    option_payoff = (random_walk[:, -1] - strike) * ((random_walk[:, -1] - strike) > 0)
    price = mean(option_payoff)
    print(price)
Code example #8
File: functional.py  Project: WoodyAhn/pytorchvision
def inv_speckle_noise(image, sigma=0.5):
    lab = cv2.cvtColor(image, cv2.COLOR_RGB2LAB)
    gray, a, b = cv2.split(lab)
    gray = gray.astype(np.float32) / 255
    H, W = gray.shape

    # Gaussian noise field scaled by sigma; the original immediately overwrote this
    # with uniform noise, leaving sigma unused, so that overwrite is dropped here
    noise = sigma * random.randn(H, W)
    noisy = gray + (1 - gray) * noise

    noisy = (np.clip(noisy, 0, 1) * 255).astype(np.uint8)
    lab = cv2.merge((noisy, a, b))
    image = cv2.cvtColor(lab, cv2.COLOR_LAB2RGB)
    return image
Code example #9
def main_optimal():
    x0 = random.randn(2)
    x0_g = random.rand(2)
    start_time = time.time()
    x_min = fmin(neg_f, x0)
    print(type(x_min))
    delta = 3
    x_glob = basinhopping(neg_f, x0_g)
    stop_time = time.time()
    print('time', stop_time - start_time)
    # print(stop_time-start_time)
    x_glob_p = x_glob['x']
    x_knots = linspace(x_min[0] - delta, x_min[0] + delta, 41)
    y_knots = linspace(x_min[1] - delta, x_min[1] + delta, 41)
    X, Y = meshgrid(x_knots, y_knots)
    Z = zeros(X.shape)
    for i in range(Z.shape[0]):
        for j in range(Z.shape[1]):
            Z[i][j] = f([X[i, j], Y[i, j]])

    ax = Axes3D(figure(figsize=(8, 5)))
    ax.plot_surface(X,
                    Y,
                    Z,
                    rstride=1,
                    cstride=1,
                    cmap=cm.coolwarm,
                    linewidth=0.4)
    ax.plot([x0[0]], [x0[1]], [f(x0)],
            color='g',
            marker='o',
            markersize=20,
            label='initial')
    ax.plot([x_min[0]], [x_min[1]], [f(x_min)],
            color='k',
            marker='o',
            markersize=20,
            label='local')
    ax.plot([x_glob['x'][0]], [x_glob['x'][1]], [f(x_glob_p)],
            color='b',
            marker='o',
            markersize=10,
            label='glob')

    ax.legend()
    show()
Code example #10
def x_glob():
    x0 = random.randn(2)

    x_min = basinhopping(neg_f, x0)
    print(x_min)
Code example #11
              [0,0,0,1,0,0],
              [0,0,0,0,1,0],
              [0,0,0,0,0,1]])

BS1 = np.mat([3000, 100, 20])
BS2 = np.mat([200, 3000, 50])
BS3 = np.mat([300, 400, 3000])
BSb = np.mat([450, -200, 100])

dat = 1.35
Q = dat*np.eye(3, dtype = int)
# print('Q=',Q)
TSOA = math.sqrt(10)
TDOA = math.sqrt(10)
# W = np.sqrt(Q)*random.randn(3, 1)
W = np.square(Q)*random.randn(3, 1)
# print('W=',W)
R = np.diag([TDOA, TDOA, TDOA])
# print('R=',R)
G = np.array([[T*T/2.0, 0, 0],
              [0, T*T/2.0, 0],
              [0, 0, T*T/2.0]])
# G = np.eye(3,dtype = T^2/2)   # erroneous code statement
# print('G = ',G)

def h_pre(x):
    global T, BS1, BS2, BS3, BSb
    # distance formula; BS1/BS2 are 1x3 np.mat objects, so elements are indexed as [0, i]
    h1 = np.zeros((1, 3))
    h1[:, 0] = math.sqrt((x[0] - BS1[0, 0])**2 + (x[1] - BS1[0, 1])**2 + (x[2] - BS1[0, 2])**2)
    h1[:, 1] = math.sqrt((x[0] - BS2[0, 0])**2 + (x[1] - BS2[0, 1])**2 + (x[2] - BS2[0, 2])**2)
Code example #12
Some packages have multiple *modules*, or sets of related functions, which can be loaded separately. For example, `numpy`'s `random` module contains functions
for generating random numbers.

from numpy import random

You can even import specific functions from packages or modules.
For example, `randn` generates numbers from the standard normal distribution.

from numpy.random import randn

Now each of these three function calls does the same thing:
generate 10 random numbers from the standard normal distribution.

randn(10)

random.randn(10)

# Type out the third, using the np prefix from before.
# Also try using tab after the dots for code completion and an open parenthesis
# for function documentation.
# ANSWER:
np.random.randn(10)

## Vector operations in `numpy` and `pandas`

Vector/array operations are integral to scientific computing in Python. Like `gen` in Stata and `apply` in R, `numpy` and `pandas` include rich
sets of vectorized functions to run common code over records quickly.

One way to take advantage of this is to pass lists to `numpy` functions; this often produces a new `array` with the same number of elements.

np.exp([0, 1, 2])
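The same elementwise behavior carries over to pandas objects; a minimal sketch (the values below are made up for illustration):

import numpy as np
import pandas as pd

s = pd.Series([0.0, 1.0, 2.0])
np.exp(s)      # elementwise, returns a Series with the same index
s * 10 + 1     # arithmetic operators are vectorized as well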
Code example #13
class BPNN(object):
    def __init__(self,sizes):
        self.sizes = sizes
        self.num_layers = len(sizes)
        # weight matrix between layer l and l+1 has shape (sizes[l+1], sizes[l]);
        # biases are column vectors, one per non-input layer
        self.w_ = [random.randn(x,y) for x,y in zip(sizes[1:],sizes[:-1])]
        self.b_ = [random.randn(y,1) for y in sizes[1:]]
Code example #14
 def random_weight(self):
     w = 0.5 + random.randn() / 2
     return w
Code example #15
    def plot(self, volatilidad, Time_mature, k):
        print("aca")

        # a figure instance to plot on
        self.figure = Figure()

        # this is the Canvas Widget that displays the `figure`
        # it takes the `figure` instance as a parameter to __init__
        self.canvas = FigureCanvas(self.figure)

        # this is the Navigation widget
        # it takes the Canvas widget and a parent
        self.toolbar = NavigationToolbar(self.canvas, self)

        # set the layout
        layout = QtGui.QVBoxLayout(self.widget_graph)
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.setLayout(layout)

        # self.addButton = QtGui.QPushButton('button to add other widgets')

        # self.mainLayout = QtGui.QVBoxLayout(self.widget_graph)

        # self.mainLayout.addWidget(self.addButton)

        data = cumprod(
            1 + random.randn(1000, int(Time_mature * 252)) *
            (volatilidad / sqrt(int(Time_mature * 252))), 1) * k
        print(data)
        ax = self.figure.add_subplot(111)
        ax.clear()
        avg = [
            sum([subdata[j] for subdata in data]) / len(data)
            for j in range(len(data[0]))
        ]
        # numpy's percentile takes the data first, then the percentile; axis=0 gives one value per time step
        perc95 = percentile(data, 95, axis=0)
        perc5 = percentile(data, 5, axis=0)
        ax.plot(perc95, 'k--')
        ax.plot(perc5, 'k--')
        ax.plot(avg, 'r-')
        #for i in data:
        #    ax.plot(i, '*-')
        self.canvas.draw()

        # data = [random.random() for i in range(10)]

        # # create an axis
        # ax = self.figure.add_subplot(111)

        # # discards the old graph
        # ax.clear()

        # # plot data
        # ax.plot(data, '*-')

        # # refresh canvas
        # self.canvas.draw()

        print("alla")
        self.show()
Code example #16
            f = ImageFont.truetype(font, font_size)

            x_offset = (new_size / 8.0 * (random.random() - 0.0))
            y_offset = (new_size / 4.0 * (random.random() - 0.0))
            n_perline = max(
                1,
                math.ceil((new_size - 1 * x_offset) / font_size) - 1)
            n_lines = math.ceil(1.0 * len(text) / n_perline)

            y_delta = int(font_size * 1.5)
            for t in range(n_lines):
                if t == n_lines - 1:
                    d.text( (x_offset,y_offset+t*y_delta), \
                        text[ (0+t*n_perline): ], fill=0, font=f)
                else:
                    d.text( (x_offset,y_offset+t*y_delta), \
                        text[(0+t*n_perline):((t+1)*n_perline)], fill=0, font=f)

            if noisy:
                # img.size is (width, height) but the pixel array is (height, width),
                # so the noise field is generated with the axes swapped to match
                noise = 25.5 * random.randn() * random.randn(
                    img.size[1], img.size[0])
                img = np.asarray(img, dtype=np.float64) + noise
                img = np.maximum(np.minimum(img, 255), 0)
                img = Image.fromarray(img.astype('uint8'), 'L')

            img.save(dest_path + str(pic_count).zfill(6) + '.png')
            img.close()
            pic_count += 1

        print('')
Code example #17
           [0, 0, 0, 1, 0, 0],
           [0, 0, 0, 0, 1, 0],
           [0, 0, 0, 0, 0, 1]])

dat = 1.35
r = np.array([1, 1, 1])
G = np.mat([[t**2/2, 0, 0],
           [0, t**2/2, 0],
           [0, 0, t**2/2],
           [t, 0, 0],
           [0, t, 0],
           [0, 0, t]])
Q = dat*np.diag(r)
TSOA = math.sqrt(10)
TDOA = math.sqrt(10)
random2 = random.randn(3, 1)
W = sqrt(Q)*random2

L = 6
alpha = 0.3  # tunable; changes the mean
kalpha = 0.54
belta = 2  # 2 is usually optimal for a Gaussian distribution; changes the variance
lamada = alpha*alpha*(L+kalpha)-L
c = L+lamada
Wm = [lamada/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c, 0.5/c]
Wc = Wm[:]  # copy so that adjusting Wc[0] below does not also change Wm[0]
Wc[0] = Wc[0]+(1-alpha**2+belta)
c = sqrt(c)
xsP1 = zeros((6, 6))
xsP2 = zeros((6, 6))
xsP11 = zeros((6, 6))
Code example #18
 def sample(self):
     """Update internal state and return it as a noise sample."""
     x = self.state
     dx = self.theta * (self.mu - x) + self.sigma * np.array([random.randn() for i in range(len(x))])
     self.state = x + dx
     return self.state
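For context, a sample() method like the one above typically lives in an Ornstein-Uhlenbeck noise class. The minimal sketch below shows one plausible surrounding class; the constructor arguments, defaults, and reset() method are assumptions modeled on common DDPG-style implementations, not taken from this snippet's project:

import numpy as np
from numpy import random

class OUNoise:
    def __init__(self, size, mu=0.0, theta=0.15, sigma=0.2):
        self.mu = mu * np.ones(size)
        self.theta = theta
        self.sigma = sigma
        self.reset()

    def reset(self):
        # restart the process at its long-run mean
        self.state = self.mu.copy()

    def sample(self):
        # same update rule as the snippet above, vectorized with randn(len(x))
        x = self.state
        dx = self.theta * (self.mu - x) + self.sigma * random.randn(len(x))
        self.state = x + dx
        return self.state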
Code example #19
## Simple talker demo that publishes iotsensor messages
## to the 'iot_sensor_topic' topic

import rospy
from assignment.msg import iotsensor
from std_msgs.msg import String
from numpy import random  # numpy's random provides randn(); the stdlib random module does not

pub = rospy.Publisher('iot_sensor_topic', iotsensor, queue_size=10)

rospy.init_node('iot_sensor_publisher_node', anonymous=True)

rate = rospy.Rate(1)

i = 0
while not rospy.is_shutdown():
    iot_sensor = iotsensor()  # instantiate the imported message class (the original shadowed it)
    iot_sensor.id = 1
    iot_sensor.name = "iot_parking_01"
    iot_sensor.temperature = 24.33 + (random.randn() * 2)
    iot_sensor.humidity = 33.4 + (random.randn() * 2)
    rospy.loginfo("I publish")
    rospy.loginfo(iot_sensor)
    pub.publish(iot_sensor)
    i = i + 1
    rate.sleep()
Code example #20
        r = -i - 1
        axis_x.append(i)
        axis_y_l.append(data[l])
        axis_y_r.append(data[r])
    plt.plot(axis_x, axis_y_r, label=r"$x_i^{max}(k)$")
    plt.plot(axis_x, axis_y_l, label=r"$x_i^{min}(k)$")
    plt.xlabel(r"iteration number $k$")
    plt.ylabel(r"$x_i^{max}(k)$ and $x_i^{min}(k)$")
    plt.legend()
    plt.show()


def variance_consensus():
    size = 40
    data = read_data("data40.txt")
    net = Network(new_top=True, num=size, s_dis=40)
    net.set_data(data)
    net.variance_consensus(max_iter=60)


def generic_pdf():
    size = 40
    data = read_data("data40.txt")
    net = Network(new_top=True, num=size, s_dis=40)
    net.set_data(data)
    net.generic_pdf_consensus(sections=20, sim=True, max_iter=60)


A = random.randn(4, 3)
B = sum(A, axis=1, keepdims=True)  # numpy's sum (assumes "from numpy import *"); keepdims keeps the column axis
print(B.shape)  # (4, 1)
Code example #21
File: locals.py  Project: jmread/alife
def br_corner():
    return random.randn(2) * 100.0 + (SCREEN - array([VISION,VISION]))
Code example #22
File: locals.py  Project: jmread/alife
def random_corner():
    if random.rand() > 0.5:
        return random.randn(2) * 10.0 + (SCREEN - array([VISION,VISION]))
    else:
        return random.randn(2) * 10.0 + (zeros(2) + array([VISION,VISION]))
Code example #23
def fun():
    x = random.randn(2) / 10
    print(x)
    return ((1 + x[0]**2 + x[1]**2))
Code example #24
N = 10001
Nf = 3
t = arange(N, dtype=float)
Ts = random.rand(Nf) * 200 + 100
fs = 1 / Ts
print('The real unknown frequencies are:', fs)

amp = random.rand(Nf) * 200 + 100
phi = random.rand(Nf) * 2 * pi

h = zeros(N)

for j in range(len(fs)):
    h += amp[j] * np.sin(2 * pi * t * fs[j] + phi[j])

hn = h + random.randn(N) * 3 * h + random.randn(N) * 700
plt.scatter(t, hn, s=3)
plt.show()

#Frequency Sampling
ind = arange(1, int(N / 2 + 1))  # indices of the positive-frequency half of the spectrum
allfreqs = fftfreq(N)  # Sampling of frequency space
realfreqs = fftfreq(N)[ind]  # frequencies of interest (the negative half is redundant: complex conjugates)

# We now compute the Fourier transform coefficients
Hn = scipy.fftpack.fft(hn)
plt.plot(allfreqs, np.abs(Hn))  # plot the magnitude; Hn itself is complex
plt.show()
Code example #25
File: random_fire.py  Project: yaron1000/wrf-fire
slpsigma=sigma*.1/sqrt(2)
windsigma=sigma*5./sqrt(2)

cenx=nx*dx/2.
ceny=ny*dy/2.
dz=sqrt(dx**2+dy**2)
ignr=dz*2
historys=runtime
timestep=dz*1.0/6.

ignxsigma=sigma*dx*nx/10.
ignysigma=sigma*dy*ny/10.

seed()
# randn() only takes shape arguments, so normal(mean, std) is used here for the
# (mean, sigma) draws that the original randn(mu, sigma) calls appear to intend
slpx = normal(0., slpsigma)
slpy = normal(0., slpsigma)
windx = normal(0., windsigma)
windy = normal(0., windsigma)
fuel = randint(1, 13)
ignx = normal(cenx, ignxsigma)
igny = normal(ceny, ignysigma)

args=['--nx',nx,'--ny',ny,'--dx',dx,'--dy',dy,
      '--windx',windx,'--windy',windy,'--slopex',slpx,'--slopey',slpy,
      '--fuelcat',fuel,'--timestep',timestep,'--runtime',runtime,'--history',historys,
      '--ignx1',ignx,'--ignx2',ignx,'--igny1',igny,'--igny2',igny,
      '--ignr',ignr,'--ignt1',igntime,'--ignt2',igntime]

args=[ str(a) for a in args]
Code example #26
File: nmb_toy_create.py  Project: pombredanne/nmb
def createRun():

	file_runlist = open(filename_runlist,'w')
	file_truth = open(filename_truth,'w')
	truth_header = '# true_si true_hlr true_g1 true_g2 model_si model_hlr model_g1 model_g2 noise_hlr noise_g1 noise_g2\n'
	file_truth.write(truth_header)
	
	grid_sersic_index = arange(config['grid']['min'],config['grid']['max']+config['grid']['step'],config['grid']['step'])

	file_conf_templ = open(filename_yaml_templ,'r')
	conf_templ = file_conf_templ.read()

	file_ini_templ = open(filename_ini_templ,'r')
	ini_templ = file_ini_templ.read()

	logging.info('got %d galaxies' % len(config['gals']))

	for iser,ser in enumerate(grid_sersic_index):

		ini_filled = ini_templ % (config['n_pix'],ser)
		filename_ini = 'si%02d.ini' % iser
		filepath_ini = os.path.join(dirname_ini,filename_ini)
		file_ini = open(filepath_ini,'w')
		file_ini.write(ini_filled)
		file_ini.close()

	for igal,gal in enumerate(config['gals']):

		# get the real galaxy
		n_tile=config['n_tile']
		n_pix=config['n_pix']
		hlr = gal['half_light_radius']
		g1  = gal['g1']
		g2  = gal['g2']
		sersic_index_real = gal['sersic_index']
		filename_real = 'real%02d.fits' % (igal)
		filepath_real = os.path.join(dirname_images,filename_real)

		conf_filled = conf_templ % (sersic_index_real,hlr,g1,g2,n_tile,n_tile,n_pix,n_pix,filepath_real)

		filename_conf = 'real%02d.yaml' % igal
		filepath_conf = os.path.join(dirname_yaml,filename_conf)
		file_conf = open(filepath_conf,'w')
		file_conf.write(conf_filled)
		file_conf.close()

		filename_cmd = 'cmd.sh'
		file_cmd = open(filename_cmd,'w')
		file_cmd.write('python %s %s\n' % (filepath_galsim_yaml, filepath_conf))
		file_cmd.close()

		if (not args.reimage) and os.path.isfile(filepath_real):
			logging.info('NOT creating %s' % filepath_real)
		else:
			subprocess.call(('sh',filename_cmd))

		# filename_ini = os.path.join(dirname_ini,'si%02d.ini' % ser)

		image_tiled = pyfits.getdata(filepath_real)
		image_stamp = image_tiled[0:n_pix,0:n_pix]
		noise_std = linalg.norm(image_stamp)/config['snr']
		img_real = image_tiled
		noise_same = []

		for nn in range(config['n_reps_diff']):

			noise = random.randn(img_real.shape[0],img_real.shape[1])*noise_std
			filename_noisy_real_diff = filename_real + ('.d%02d' % nn)
			filepath_noisy_real_diff = os.path.join(dirname_images,filename_noisy_real_diff)
			img_real_noisy_diff = img_real + noise

			if (not args.reimage) and os.path.isfile(filepath_noisy_real_diff):
				logging.info('NOT creating noisy images %s' % (filepath_noisy_real_diff))
			else:
				pyfits.writeto(filepath_noisy_real_diff,img_real_noisy_diff.astype(float32),clobber=True)

			for iser,ser in enumerate(grid_sersic_index):	

				filename_ini = 'si%02d.ini' % iser
				filepath_ini = os.path.join(dirname_ini,filename_ini)

				file_runlist.write('%s\t%s\n' % (filename_noisy_real_diff,filename_ini))

		for nn in range(config['n_reps_same']):

			noise_same.append(random.randn(img_real.shape[0],img_real.shape[1])*noise_std)
			filename_noisy_real_same = filename_real + ('.s%02d' % nn)
			filepath_noisy_real_same = os.path.join(dirname_images,filename_noisy_real_same)
			img_real_noisy_same = img_real + noise_same[nn]

			if (not args.reimage) and os.path.isfile(filepath_noisy_real_same):
				logging.info('NOT creating noisy images %s' % (filepath_noisy_real_same))
			else:
				pyfits.writeto(filepath_noisy_real_same,img_real_noisy_same.astype(float32),clobber=True)

			for iser,ser in enumerate(grid_sersic_index):	

				filename_ini = 'si%02d.ini' % iser
				filepath_ini = os.path.join(dirname_ini,filename_ini)

				file_runlist.write('%s\t%s\n' % (filename_noisy_real_same,filename_ini))



		# now create the bestfit images
		for iser,ser in enumerate(grid_sersic_index):

			# this should have been created already
			filename_ini = 'si%02d.ini' % iser
			filepath_ini = os.path.join(dirname_ini,filename_ini)
			logging.info('running im3shape')
			i3gal = getBestFit(image_stamp,filepath_ini)
			ser_g1 = i3gal.params_gal_measured[2]
			ser_g2 = 0
			ser_hlr  = i3gal.params_gal_measured[4]

			truth_line = '%2.2f\t%2.8f\t% 2.8f\t% 2.8f\t%2.2f\t%2.8f\t% 2.8f\t% 2.8f\n' % (sersic_index_real,hlr,g1,g2,ser,ser_hlr,ser_g1,ser_g2)
			logging.info(truth_line)
			file_truth.write(truth_line)

			filename_bfit = 'real%02d.bfit%02d.fits' % (igal,iser)
			filepath_bfit = os.path.join(dirname_images,filename_bfit)

			conf_filled = conf_templ % (ser,ser_hlr,ser_g1,ser_g2,n_tile,n_tile,n_pix,n_pix,filepath_bfit)

			filename_conf = 'real%02d.bfit%02d.yaml' % (igal,iser)
			filepath_conf = os.path.join(dirname_yaml, filename_conf)
			file_conf = open(filepath_conf,'w')
			file_conf.write(conf_filled)
			file_conf.close()

			filename_cmd = 'cmd.sh'
			file_cmd = open(filename_cmd,'w')
			file_cmd.write('python %s %s\n' % (filepath_galsim_yaml, filepath_conf))
			file_cmd.close()

			if (not args.reimage) and os.path.isfile(filepath_bfit):
				logging.info('NOT creating %s' % filepath_bfit)
			else:
				subprocess.call(('sh',filename_cmd))

			# create nosiy versions

			noise_std = linalg.norm(image_stamp)/config['snr']
	
			img_real = pyfits.getdata(filepath_real)
			img_bfit = pyfits.getdata(filepath_bfit)

			for nn in range(config['n_reps_diff']):

				# add different noise maps
				filename_noisy_bfit_diff = filename_bfit + ('.d%02d' % nn)
				filepath_noisy_bfit_diff = os.path.join(dirname_images,filename_noisy_bfit_diff)
				noise = random.randn(img_bfit.shape[0],img_bfit.shape[1])*noise_std
				img_bfit_noisy_diff = img_bfit + noise
				
				if (not args.reimage) and os.path.isfile(filepath_noisy_bfit_diff):
					logging.info('NOT creating noisy images %s' % (filepath_noisy_bfit_diff))
				else:
					pyfits.writeto(filepath_noisy_bfit_diff,img_bfit_noisy_diff.astype(float32),clobber=True)

				file_runlist.write('%s\t%s\n' % (filename_noisy_bfit_diff,filename_ini))

			for nn in range(config['n_reps_same']):
					
				# add same noise maps
				filename_noisy_bfit_same = filename_bfit + ('.s%02d' % nn)
				filepath_noisy_bfit_same = os.path.join(dirname_images,filename_noisy_bfit_same)
				img_bfit_noisy_same = img_bfit + noise_same[nn]

				if (not args.reimage) and os.path.isfile(filepath_noisy_bfit_same):
					logging.info('NOT creating noisy image %s' % (filepath_noisy_bfit_same))
				else:
					pyfits.writeto(filepath_noisy_bfit_same,img_bfit_noisy_same.astype(float32),clobber=True)

				file_runlist.write('%s\t%s\n' % (filename_noisy_bfit_same,filename_ini))
Code example #27
File: main.py  Project: XhitHub/gans_expt
def generate_latent_points(latent_dim, n_samples):
    # generate points in the latent space
    x_input = random.randn(latent_dim * n_samples)
    # reshape into a batch of inputs for the network
    x_input = x_input.reshape(n_samples, latent_dim)
    return x_input
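A quick shape check for the helper above; the latent dimension and sample count are arbitrary illustrative choices (the helper assumes numpy's random module is in scope):

z = generate_latent_points(100, 16)
print(z.shape)   # (16, 100): one 100-dimensional latent vector per sample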
Code example #28
from numpy import random  # numpy's random is needed for randn(); the stdlib random module has no randn
import sys
import os
import pandas as pd
from pylab import *
from scipy.cluster.vq import *
from numpy import *
from PIL import *

# generate test data
# generate 100 rows and 2 columns

class1 = 1.5 * random.randn(100, 2)
print(class1)
class2 = random.randn(100, 2) + array([5, 5])
# print(class2)
features = vstack((class1, class2))
#print(features[0:1])
#print("wanttttt")

# K-Means clustering
centroids, variance = kmeans(features, 2)
# print(centroids)
#print("varancccc")
# print(variance)
code, distance = vq(features, centroids)
print(vq(features, centroids))
# print(distance)
figure()

ndx = where(code == 0)[0]