Example no. 1
0
 def get_derivatives(self, smooth_loss=True, lr_begin=0.001, lr_end=1):
     '''
     returns: tuple of (derivative of the loss w.r.t. lr, lr array, loss array)
              over the specified range of lr
     parameters:
       smooth_loss: whether to use the smoothed loss
       lr_begin & lr_end: these learning rates specify the range in which to
                     calculate derivative of the loss w.r.t learning rate
     '''
     lr_complete_vector = np.array(self.lrs)
     if lr_begin is not None and lr_end is not None:
         indices = np.where((lr_complete_vector > lr_begin)
                            & (lr_complete_vector < lr_end))
         lr_vector = np.array(lr_complete_vector[indices])
         if smooth_loss:
             loss_vector = np.array(self.smoothed_losses)[indices]
         else:
             loss_vector = np.array(self.losses)[indices]
     elif lr_begin is not None and lr_end is None:
         indices = np.where(lr_complete_vector > lr_begin)
         lr_vector = np.array(lr_complete_vector[indices])
         if smooth_loss:
             loss_vector = np.array(self.smoothed_losses)[indices]
         else:
             loss_vector = np.array(self.losses)[indices]
     else:
         indices = np.where(lr_complete_vector < lr_end)
         lr_vector = np.array(lr_complete_vector[indices])
         if smooth_loss:
             loss_vector = np.array(self.smoothed_losses)[indices]
         else:
             loss_vector = np.array(self.losses)[indices]
     der_vector = np.gradient(loss_vector, lr_vector)
     return der_vector, lr_vector, loss_vector
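# Hedged usage sketch: assumes an LR-range-finder-style object `finder` (hypothetical
# name) that recorded self.lrs, self.losses and self.smoothed_losses during a range test.
ders, lrs, losses = finder.get_derivatives(smooth_loss=True, lr_begin=1e-5, lr_end=1)
best_lr = lrs[np.argmin(ders)]  # learning rate at which the loss is falling fastest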
Example no. 2
0
 def feature_hot_encoding(self, l_dict, cimg_idx):
     if len(l_dict['features']) == 14:
         landmark = np.array(l_dict['features'][0:10], dtype=np.float32)
         if l_dict['features'][10] == 0:
             gender = np.array([1., 0.], dtype=np.float32)
         else:
             gender = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][11] == 0:
             smile = np.array([1., 0.], dtype=np.float32)
         else:
             smile = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][12] == 0:
             glasses = np.array([1., 0.], dtype=np.float32)
         else:
             glasses = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][13] == 0:
             headpose = np.array([1., 0., 0., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 1:
             headpose = np.array([0., 1., 0., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 2:
             headpose = np.array([0., 0., 1., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 3:
             headpose = np.array([0., 0., 0., 1., 0.], dtype=np.float32)
         else:
             headpose = np.array([0., 0., 0., 0., 1.], dtype=np.float32)
         return landmark, gender, smile, glasses, headpose
     if len(l_dict['features']) == 13:
         print(cimg_idx)
         landmark = np.array(l_dict['features'][0:9], dtype=np.float32)
         landmark = np.insert(landmark, 9, 0)
         l_dict['features'] = np.insert(l_dict['features'], 9, 0)
         if l_dict['features'][10] == 0:
             gender = np.array([1., 0.], dtype=np.float32)
         else:
             gender = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][11] == 0:
             smile = np.array([1., 0.], dtype=np.float32)
         else:
             smile = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][12] == 0:
             glasses = np.array([1., 0.], dtype=np.float32)
         else:
             glasses = np.array([0., 1.], dtype=np.float32)
         if l_dict['features'][13] == 0:
             headpose = np.array([1., 0., 0., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 1:
             headpose = np.array([0., 1., 0., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 2:
             headpose = np.array([0., 0., 1., 0., 0.], dtype=np.float32)
         elif l_dict['features'][13] == 3:
             headpose = np.array([0., 0., 0., 1., 0.], dtype=np.float32)
         else:
             headpose = np.array([0., 0., 0., 0., 1.], dtype=np.float32)
         return landmark, gender, smile, glasses, headpose
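# Hedged aside: each of the one-hot vectors above can be built more compactly with
# np.eye, assuming the flags take values 0/1 (0-4 for the head-pose label), e.g.:
flag = 1
gender = np.eye(2, dtype=np.float32)[flag]      # -> [0., 1.]
pose = 3
headpose = np.eye(5, dtype=np.float32)[pose]    # -> [0., 0., 0., 1., 0.]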
Example no. 3
0
def OR(x1, x2):
    x = np.array([x1, x2])
    w = np.array([0.5, 0.5])  # w: weights
    b = -0.2  # b: bias
    tmp = np.sum(w * x) + b
    if tmp <= 0:
        return 0
    else:
        return 1
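# Quick check of the OR perceptron over all four input combinations:
for x1, x2 in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    print(f"OR({x1}, {x2}) = {OR(x1, x2)}")  # expected output: 0, 1, 1, 1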
Example no. 4
0
 def __init__(self, field_dims, embed_dim):
     super().__init__()
     self.num_fields = len(field_dims)
     self.embeddings = nn.ModuleList([
         nn.Embedding(sum(field_dims), embed_dim) for _ in range(self.num_fields)
     ])
     self.offsets = np.array((0, *np.cumsum(field_dims)[:-1]), dtype=np.int64)
     for embedding in self.embeddings:
         nn.init.xavier_uniform_(embedding.weight.data)
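# Hedged sketch of how `offsets` is typically consumed in the forward pass (this follows
# the common pytorch-fm pattern; the actual forward method is not part of the snippet):
#     x = x + x.new_tensor(self.offsets).unsqueeze(0)   # shift per-field category indices
#     xs = [embedding(x) for embedding in self.embeddings]
# so each field's raw indices map into the shared range 0..sum(field_dims) without overlap.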
Example no. 5
0
def unroll(record):
    startdate = np.datetime64('{}-{:02}'.format(record['year'],
                                                record['month']))
    dates = np.arange(startdate, startdate + np.timedelta64(1, 'M'),
                      np.timedelta64(1, 'D'))

    rows = [(date, record[str(i + 1)] / 10) for i, date in enumerate(dates)]

    return np.array(rows, dtype=[('date', 'M8[D]'), ('value', 'd')])
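# Hedged usage sketch: unroll appears to expect a record with 'year', 'month' and one
# key per day of that month holding ten times the daily value (hence the division by 10).
record = {'year': 2021, 'month': 3, **{str(d): 10 * d for d in range(1, 32)}}
daily = unroll(record)
print(daily['date'][0], daily['value'][0])  # 2021-03-01 1.0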
Example no. 6
0
 def gen_lin_separable_data():
     #generate training data in the 2-d case
     mean1 = np.array([0, 2])
     mean2 = np.array([2, 0])
     cov = np.array([[0.8, 0.6], [0.6, 0.8]])
     X1 = np.random.multivariate_normal(mean1, cov, 100)
     y1 = np.ones(len(X1))
     X2 = np.random.multivariate_normal(mean2, cov, 100)
     y2 = np.ones(len(X2)) * -1
     return X1, y1, X2, y2
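# Minimal usage sketch: stack the two Gaussian clouds into one labelled training set.
X1, y1, X2, y2 = gen_lin_separable_data()
X = np.vstack([X1, X2])   # (200, 2) feature matrix
y = np.hstack([y1, y2])   # labels: +1 for the first cloud, -1 for the second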
Example no. 7
0
def two2three(angles, r1, r2, v1, v2, x, y, vx, vy):

    #add 0.0 to the z coordinate
    r1vf = np.transpose([r1 + (0.0, )])
    r2vf = np.transpose([r2 + (0.0, )])
    v1vf = np.transpose([v1 + (0.0, )])
    v2vf = np.transpose([v2 + (0.0, )])

    theta1, theta2, theta3 = angles  #extract angles
    #calculate cos and sin
    c3, s3 = np.cos(theta3), np.sin(theta3)
    c2, s2 = np.cos(-theta2), np.sin(-theta2)
    c1, s1 = np.cos(theta1), np.sin(theta1)

    #Create rotation matrices
    R3 = np.array(((1.0, 0.0, 0.0), (0.0, c3, -s3), (0.0, s3, c3)))
    R2 = np.array(((c2, 0.0, s2), (0.0, 1.0, 0.0), (-s2, 0.0, c2)))
    R1 = np.array(((c1, -s1, 0.0), (s1, c1, 0.0), (0.0, 0.0, 1.0)))
    #Dot them into one
    R = np.dot(R1, np.dot(R2, R3))

    #Rotate r and v
    r1v1 = np.dot(R, r1vf)
    r2v1 = np.dot(R, r2vf)
    v1v1 = np.dot(R, v1vf)
    v2v1 = np.dot(R, v2vf)

    #apply rotation to all points and velocities of trajectory
    N = len(x)
    xr = np.array([0.0] * N)
    yr = np.array([0.0] * N)
    zr = np.array([0.0] * N)
    vxr = np.array([0.0] * N)
    vyr = np.array([0.0] * N)
    vzr = np.array([0.0] * N)

    for i in range(N):
        r = np.transpose(np.array([[x[i], y[i], 0.0]]))
        r = np.dot(R, r)
        #save it
        xr[i] = r[0]
        yr[i] = r[1]
        zr[i] = r[2]

        #now for the velocities
        v = np.transpose(np.array([[vx[i], vy[i], 0.0]]))
        v = np.dot(R, v)
        #save it
        vxr[i] = v[0]
        vyr[i] = v[1]
        vzr[i] = v[2]

    return r1v1, r2v1, v1v1, v2v1, xr, yr, zr, vxr, vyr, vzr
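# Hedged sanity check: each Ri above is an elementary rotation, so the combined R must
# be orthogonal with determinant +1; inside two2three one could verify this with
#     assert np.allclose(np.dot(R, R.T), np.eye(3)) and np.isclose(np.linalg.det(R), 1.0)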
Example no. 8
0
def test(doubleSets):
    bools = []
    features = []
    titles = []
    probs = []
    correct = 0
    incorrect = 0
    for item in doubleSets:
        bools.append(item['year'])
        vec = fe.get(item['sentences1'],item['sentences2'])
        titles.append([item['title1'],item['title2']])
        features.append(vec)

    for feature in range(len(features)):
        predict = clf.predict(np.array([features[feature]]))
        prob = clf.predict_proba(np.array([features[feature]]))
        probs.append([predict,prob, bools[feature]])
Example no. 9
0
def integrated_gradients(input_model,
                         image,
                         nsteps=100,
                         layer_name="predictions",
                         cls=-1):
    def interpolated_images(original, nsteps):
        outs = []
        for i in range(nsteps - 1):
            out = original - original * (i * 1 / (nsteps))
            outs.append(out)
        outs.append(original)
        outs = np.array(outs)
        return outs[::-1]

    if len(image.shape) == 3:
        image = np.expand_dims(image, axis=0)
    if len(image.shape) == 1:
        image = np.expand_dims(image, axis=0)

    grads_val = []
    if cls == -1:
        _cls = np.argmax(input_model.predict(image))
    else:
        _cls = np.array(cls)

    input_imgs = input_model.input

    layer_output = input_model.get_layer(layer_name).output[:, _cls]  #batched
    grads = K.gradients(layer_output, input_imgs)[0]  #batched
    backprop_fn = K.function([input_imgs, K.learning_phase()], [grads])

    images = interpolated_images(image[0], nsteps=nsteps)

    _grads_val = np.array(backprop_fn([images, 0])[0])
    #force absolute gradients
    _grads_val = np.average(_grads_val, axis=0)
    _grads_val = np.abs(_grads_val).max(axis=-1) / _grads_val.max()
    grads_val = np.array(_grads_val)

    del grads, backprop_fn
    return grads_val
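# Hedged usage sketch: assumes a compiled Keras model whose final layer is named
# "predictions" (as in stock keras.applications classifiers) and a preprocessed image:
#     attribution = integrated_gradients(model, img, nsteps=50)
#     plt.imshow(attribution)   # per-pixel attribution map with img's spatial shape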
Example no. 10
0
def plotBestFit(data1, data2):
    dataArr1 = np.array(data1)
    dataArr2 = np.array(data2)

    m = np.shape(dataArr1)[0]
    axis_x1 = []
    axis_y1 = []
    axis_x2 = []
    axis_y2 = []
    for i in range(m):
        axis_x1.append(dataArr1[i,0])
        axis_y1.append(dataArr1[i,1])
        axis_x2.append(dataArr2[i,0])
        axis_y2.append(dataArr2[i,1])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(axis_x1,axis_y1,s=50,c='red',marker='s')
    ax.scatter(axis_x2,axis_y2,s=50,c='blue')
    plt.xlabel('x1');plt.ylabel('x2')
    plt.savefig("outfile.png")
    plt.show()
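# Minimal usage sketch with two synthetic 2-D point clouds of equal size:
data1 = np.random.randn(50, 2) + [2, 2]
data2 = np.random.randn(50, 2) - [2, 2]
plotBestFit(data1, data2)  # saves outfile.png and shows the scatter plot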
Example no. 11
0
## License: Apache 2.0. See LICENSE file in root directory.
## Copyright(c) 2015-2017 Intel Corporation. All Rights Reserved.

###############################################
##      Open CV and Numpy integration        ##
###############################################

import pyrealsense2 as rs
import numpy as np
import cv2
import os
from Cognition import Cognition

transform_matrix = np.array([[-1.02487292, 0.34022334, 0.02018987, 0.53352241],
                             [0.17523787, 0.87148062, -0.43759237, 0.70475617],
                             [-0.0255134, -0.66172576, -0.5131572, 0.49851241],
                             [0., 0., 0., 1.]])

if __name__ == "__main__":
    # Configure depth and color streams
    pipeline = rs.pipeline()
    config = rs.config()

    # Get device product line for setting a supporting resolution
    pipeline_wrapper = rs.pipeline_wrapper(pipeline)
    pipeline_profile = config.resolve(pipeline_wrapper)
    device = pipeline_profile.get_device()
    device_product_line = str(device.get_info(rs.camera_info.product_line))

    config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
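    # Hedged aside: transform_matrix is a 4x4 homogeneous transform, so a 3-D point from
    # the camera frame can be mapped into the target frame like this (values illustrative):
    point_cam = np.array([0.1, -0.2, 0.75, 1.0])   # (x, y, z, 1) in metres
    point_out = transform_matrix @ point_cam       # homogeneous result, w stays 1 here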
Example no. 12
0
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np

n_angles = 36
n_radii = 8

# An array of radii
# Does not include radius r=0, this is to eliminate duplicate points
radii = np.linspace(0.125, 1.0, n_radii)
# (the original snippet hard-coded long x, y and z coordinate arrays at this point;
#  all three variables are overwritten by the synthetic pringle-surface data below)
# An array of angles
angles = np.linspace(0, 2*np.pi, n_angles, endpoint=False)

# Repeat all angles for each radius
angles = np.repeat(angles[...,np.newaxis], n_radii, axis=1)

# Convert polar (radii, angles) coords to cartesian (x, y) coords
# (0, 0) is added here. There are no duplicate points in the (x, y) plane
x = np.append(0, (radii*np.cos(angles)).flatten())
y = np.append(0, (radii*np.sin(angles)).flatten())

# Pringle surface
z = np.sin(-x*y)

fig = plt.figure()
ax = fig.add_subplot(projection='3d')
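# The underlying matplotlib trisurf demo finishes by triangulating these points;
# a minimal completion using the imports above would be:
ax.plot_trisurf(x, y, z, cmap=cm.jet, linewidth=0.2)
plt.show()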
Example no. 13
0
corpo2_area_molhada = corpo2_diametro * corpo2_comprimento

aleta_massa = float(
    aleta_densidade * aleta_espessura *
    ((aleta_comprimento_ponta + aleta_comprimento_raiz) * aleta_largura / 2))
aleta_area_molhada = (aleta_comprimento_ponta +
                      aleta_comprimento_raiz) * aleta_largura / 2
AR = (2 * aleta_largura**2) / aleta_area_molhada

area_de_referencia = pi * (coifa_diametro / 2)**2

# normalization of the initial thrust
empuxox = [0]
empuxoy = [0]
empuxoz = [motor_empuxo]
empuxo = np.array([empuxox[0], empuxoy[0], empuxoz[0]])

# constants
Terra_massa = 5.972 * (10**24)
Ar_densidade = 1.225
Ar_viscosidade = 1.8 * 10**-5
# speed of sound
C = 340.29
# approximate mean height of the surface roughness
Rs = 200 * 10**-6
# launch rod
comprimento_da_haste = 2

posicaox = [0]
posicaoy = [0]
posicaoz = [0]
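# Hedged illustration with assumed values: the reference area and the air density
# typically enter the aerodynamic drag force as F_d = 0.5 * rho * v**2 * Cd * A_ref.
Cd = 0.5       # assumed drag coefficient
v = 100.0      # assumed airspeed in m/s
forca_arrasto = 0.5 * Ar_densidade * v**2 * Cd * area_de_referencia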
Example no. 14
0
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
import warnings
import pickle
warnings.filterwarnings("ignore")

data = pd.read_csv('forest_fire.csv')
data = np.array(data)

X = data[1:, 1:-1]
y = data[1:, -1]
y = y.astype('int')
X = X.astype('int')

X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.3,
                                                    random_state=0)
log_reg = LogisticRegression()

log_reg.fit(X_train, y_train)

inputt = [int(x) for x in "45 32 60".split(' ')]
final = [np.array(inputt)]

b = log_reg.predict_proba(final)

pickle.dump(log_reg, open('model.pkl', 'wb'))
model = pickle.load(open('model.pkl', 'rb'))
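# Hedged check that the reloaded model matches the original estimator:
print(b, model.predict_proba(final))  # both should print the same class probabilities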

z = np.linspace(2,10,5)


# In[15]:


z


# In[18]:


lst = [1,2,3,4]
s = np.array([lst])


# In[19]:


s= np.array([lst])


# In[20]:


s


# In[21]:
Example no. 16
0
def loaddata(datafile):
    return np.array(pd.read_csv(datafile, sep="\t", header=None)).astype(float)
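# Hedged usage sketch, assuming a tab-separated numeric file with no header row:
#     data = loaddata("dataset.tsv")   # -> 2-D float ndarray, one row per input line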