def __load_exk_embeddings(self, exk_embeddings_handler):

        path = PropertiesManager.get_prop_value(
            PropertiesNames.EMBEDDINGS_PATH)
        emb_max_words = PropertiesManager.get_prop_value(
            PropertiesNames.EMBEDDINGS_MAX_WORDS)
        # Cap on the number of embeddings to load; default to 100000 when unset.
        if emb_max_words is not None:
            emb_max_words = int(emb_max_words)
        else:
            emb_max_words = 100000

        encoding = PropertiesManager.get_prop_value(PropertiesNames.ENCODING)
        print("Begin: Loading embeddings")
        exk_embeddings_handler.load(path,
                                    encoding,
                                    offset=2,
                                    max_words=emb_max_words)
        print("End: Loading embeddings")

        # Draw the OOV vector once and register it under its reserved index.
        if self.__oov_vector is None:
            self.__oov_vector = 2 * 0.1 * np_rand(
                exk_embeddings_handler.get_vector_embedding_dimension()) - 1
        exk_embeddings_handler.set_vector_embedding(
            "_OOV_", self.__features_magic_index["OOV"], self.__oov_vector)

        # Draw the padding vector once and register it under its reserved index.
        if self.__padding_vector is None:
            self.__padding_vector = 2 * 0.1 * np_rand(
                exk_embeddings_handler.get_vector_embedding_dimension()) - 1
        exk_embeddings_handler.set_vector_embedding(
            "_PADDING_", self.__features_magic_index["PADDING"],
            self.__padding_vector)
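# PropertiesManager, PropertiesNames and the features magic-index dictionary
# belong to the surrounding project and are not shown here. The following is a
# rough, hypothetical stand-in for the handler interface this method relies on
# (load / get_vector_embedding_dimension / set_vector_embedding); the real exk
# handler may differ.
import numpy as np


class SimpleEmbeddingsHandler:
    """Hypothetical illustration only; not the project's actual handler."""

    def __init__(self):
        self._indexes = {}  # word -> row index
        self._matrix = []   # list of 1-D numpy vectors

    def load(self, path, encoding, offset=2, max_words=100000):
        # Reserve `offset` leading rows (e.g. padding and OOV) for later filling.
        self._matrix = [None] * offset
        with open(path, "r", encoding=encoding) as emb_file:
            emb_file.readline()  # skip the header line
            for i, line in enumerate(emb_file):
                if i >= max_words:
                    break
                word, *values = line.rstrip().split(" ")
                self._indexes[word] = len(self._matrix)
                self._matrix.append(np.array([float(v) for v in values]))

    def get_vector_embedding_dimension(self):
        return len(self._matrix[-1])

    def set_vector_embedding(self, word, index, vector):
        self._indexes[word] = index
        self._matrix[index] = vector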
Example #2
def test1(num_iters=100, scale=100., debug_print=False):
    """Stress-test qfc.level_k_ellipsoid on random 2-D positive-definite quadratics."""
    for i in range(num_iters):
        A = make_positive_definite_matrix(dim=2,
                                          scale=scale,
                                          debug_print=debug_print)
        b = scale * (np_rand(2) * 2 - 1)
        c = np_uniform(low=-scale, high=scale)

        x0 = np_array([1., 1])
        fmin, _ = get_scipy_mins_for_quadratic_form(x0, A, b, c, alpha, beta)
        k = fmin + 1e-9 if i % 10 == 0 else np_uniform(low=fmin,
                                                       high=fmin + 400.)

        if debug_print:
            print("iter={}".format(i + 1))
            print("A=\n{}".format(A))
            print("b={}".format(b))
            print("c={}".format(c))
            print("k={} fmin={}".format(k, fmin))

        _, _, _ = qfc.level_k_ellipsoid(A,
                                        b,
                                        c,
                                        alpha,
                                        beta,
                                        k,
                                        center_check_atol=.1,
                                        debug_print=debug_print)
    print("test1: completed {} iterations without a problem".format(i + 1))
    return
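# make_positive_definite_matrix, get_scipy_mins_for_quadratic_form, alpha, beta
# and qfc are defined elsewhere in the test module. A minimal sketch of the
# matrix helper, assuming it only has to return a random symmetric
# positive-definite matrix at the requested scale (the real helper may differ):
import numpy as np


def make_positive_definite_matrix(dim=2, scale=100., debug_print=False):
    # M @ M.T is symmetric positive semi-definite; adding a small multiple of
    # the identity makes it strictly positive definite.
    M = scale * (np.random.rand(dim, dim) * 2 - 1)
    A = M @ M.T + 1e-6 * np.eye(dim)
    if debug_print:
        print("eigenvalues:", np.linalg.eigvalsh(A))
    return A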
Example #3
def bootstrap_resample(X, n=None):
    """Return a bootstrap-resampled copy of an array-like.

    X : array_like
        Data to resample.
    n : int, optional
        Length of the resampled array; defaults to len(X) when None.
    """
    if n is None:
        n = len(X)

    # Draw n indices uniformly at random, with replacement.
    resample_i = np_floor(np_rand(n) * len(X)).astype(int)
    X_resample = X[resample_i]
    return X_resample
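# Quick usage sketch for bootstrap_resample, assuming the np_* aliases map to
# numpy (np_rand = numpy.random.rand, np_floor = numpy.floor); the data below
# is made up.
import numpy as np
from numpy import floor as np_floor
from numpy.random import rand as np_rand

X = np.arange(100, dtype=float)         # synthetic data
X_full = bootstrap_resample(X)          # same length as X
X_small = bootstrap_resample(X, n=10)   # shorter resample

# Resampling with replacement keeps the mean close to the original on average.
print(X.mean(), X_full.mean(), X_small.mean())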
def read_embeddings(path, offset, random_state=42):
    """Load an embeddings file.

    The first `offset` rows are reserved for special tokens (here assumed to
    be padding and OOV) and are filled with random vectors after reading.
    """
    word_embeddings = [[] for _ in range(offset)]
    word_indexes = {}
    own_strip = str.strip
    with open(path, "r", encoding="utf-8") as emb_file:
        emb_file.readline()  # Skip the header line.
        for line in emb_file:
            fields = line.partition(EMB_SEP_CHAR)
            word = fields[0].strip()
            emb_values = np_array(
                [float(x) for x in own_strip(fields[-1]).split(EMB_SEP_CHAR)])
            word_indexes[word] = len(word_embeddings)
            word_embeddings.append(emb_values)

    # Offset = 2: rows 0 and 1 are filled with random padding / OOV vectors.
    np_seed(random_state)
    word_embeddings[0] = 2 * 0.1 * np_rand(len(word_embeddings[2])) - 1
    word_embeddings[1] = 2 * 0.1 * np_rand(len(word_embeddings[2])) - 1

    return (word_embeddings, word_indexes)
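# EMB_SEP_CHAR, np_seed, np_array and np_rand are module-level aliases/constants
# from the source project. A hedged usage sketch; the file name is a placeholder
# and the file is assumed to follow the usual text format with a header line.
word_embeddings, word_indexes = read_embeddings("embeddings.vec", offset=2)

# Rows 0 and 1 hold the random padding/OOV vectors; real word vectors start at 2.
print(len(word_indexes), "words,", len(word_embeddings[2]), "dimensions")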
Example #5
def Skew(x, y, dat, noise=3):
    """Shear `dat` along the y-axis, filling out-of-range cells with zero-centred noise."""
    interp = sp_interp2d(x, y, dat)
    dx = x[1] - x[0]
    # Extend the y-range downwards so every sheared row fits in the new grid.
    ySkew = np_arange(np_amin(y) - dx * x.size, np_amax(y), dx)
    DAT = np_empty((ySkew.size, x.size))

    yMax = np_amax(y)
    yMin = np_amin(y)
    for i in range(ySkew.size):
        for j in range(x.size):
            # Column j is sampled at the shifted coordinate ySkew[i] + j*dx;
            # sample the interpolant where that point lies inside the original
            # data, otherwise fill with uniform noise.
            if ySkew[i] + j * dx > yMax or ySkew[i] + j * dx < yMin:
                DAT[i, j] = (np_rand(1) - 0.5) * noise
            else:
                DAT[i, j] = interp(x[j], ySkew[i] + j * dx)

    return ySkew, DAT
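# sp_interp2d, np_arange, np_amin, np_amax, np_empty and np_rand wrap the
# corresponding scipy/numpy functions in the source module. A small usage
# sketch with synthetic data (sizes chosen arbitrarily):
import numpy as np

x = np.linspace(0.0, 1.0, 20)
y = np.linspace(0.0, 1.0, 30)
dat = np.random.rand(y.size, x.size)   # interp2d expects shape (len(y), len(x))

ySkew, DAT = Skew(x, y, dat, noise=3)
print(DAT.shape)   # (ySkew.size, x.size): the sheared, noise-padded grid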
Example #6
def rand(*shape: int, diff=False, name='Tensor[rand]') -> Tensor:
    ''' Random values in a given shape.
    '''

    return Tensor(np_rand(*shape), diff=diff, name=name)
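# Tensor and np_rand come from the surrounding autograd-style library, so this
# wrapper is only meaningful in that context; a hypothetical call would be:
w = rand(3, 4, diff=True, name='Tensor[weights]')  # 3x4 tensor of uniform [0, 1) values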
Example #7
        # 		Mij = 0
        # 		for k in range(n):
        # 			Mij = Mij + D[k]*x[k,i]*x[k,j]
        # 		if i == j: lst_eq.append(Mij - 1)
        # 		else: lst_eq.append(Mij)
        # constraint = np.array(lst_eq)
        constraint = np_sum(
            np_square(np_diagonal(np_dot(x, np_trans(x)) - np_eye(n))))
        return ans + c * constraint


# def f_jac(x,A,n,dim,D,c):
# 	ans = 0.0
# 	jac = np_zeros((dim,1))
# 	jac[] = np_dot(A,x)

# Small symmetric 0/1 matrix; D holds its column sums (node degrees if A is
# read as an adjacency matrix).
A = [[1, 1, 0, 0], [1, 1, 1, 1], [0, 1, 1, 0], [0, 1, 0, 1]]
A_dense = np_array(A)
D = np_sum(A_dense, axis=0)
A = scipy.sparse.csr_matrix(A_dense)
# A = A_dense

A_sq = np_dot(A, A)

dim = 1
n = 4

# Minimise f, starting from a random point of length n * dim.
res = optimize.minimize(partial(f, A=A, n=n, dim=dim, D=D, c=1),
                        np_rand(n * dim))

print(res.x)
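# This last snippet is a fragment: the def line of f is missing, and the np_*
# aliases, partial, scipy.sparse and optimize are imported at module level in
# the source. A reconstruction of the imports the visible code would need,
# offered as an assumption only:
from functools import partial

import scipy.sparse
from scipy import optimize
from numpy import (array as np_array, sum as np_sum, dot as np_dot,
                   eye as np_eye, square as np_square,
                   diagonal as np_diagonal, transpose as np_trans)
from numpy.random import rand as np_rand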