Exemple #1
0
def test_polynomial_features_csr_X_dim_edges(deg, dim, interaction_only):
    """Sparse CSR input must produce the same polynomial features as dense."""
    sparse_input = sparse_random(1000, dim, 0.5, random_state=0).tocsr()
    dense_input = sparse_input.toarray()

    transformer = PolynomialFeatures(deg, interaction_only=interaction_only)
    from_sparse = transformer.fit_transform(sparse_input)
    from_dense = transformer.fit_transform(dense_input)

    assert isinstance(from_sparse, sparse.csr_matrix)
    assert from_sparse.dtype == from_dense.dtype
    assert_array_almost_equal(from_sparse.A, from_dense)
Exemple #2
0
def test_polynomial_features_csr_X_zero_row(zero_row_index, deg, interaction_only):
    """An all-zero row in the CSR input must not break CSR/dense agreement."""
    sparse_input = sparse_random(3, 10, 1.0, random_state=0).tocsr()
    sparse_input[zero_row_index, :] = 0.0
    dense_input = sparse_input.toarray()

    transformer = PolynomialFeatures(
        deg, include_bias=False, interaction_only=interaction_only
    )
    from_sparse = transformer.fit_transform(sparse_input)
    from_dense = transformer.fit_transform(dense_input)

    assert isinstance(from_sparse, sparse.csr_matrix)
    assert from_sparse.dtype == from_dense.dtype
    assert_array_almost_equal(from_sparse.A, from_dense)
Exemple #3
0
def test_polynomial_features_csr_X_floats(deg, include_bias, interaction_only, dtype):
    """CSR and dense inputs cast to *dtype* must transform identically."""
    sparse_input = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
    dense_input = sparse_input.toarray()

    transformer = PolynomialFeatures(
        deg, include_bias=include_bias, interaction_only=interaction_only
    )
    from_sparse = transformer.fit_transform(sparse_input.astype(dtype))
    from_dense = transformer.fit_transform(dense_input.astype(dtype))

    assert isinstance(from_sparse, sparse.csr_matrix)
    assert from_sparse.dtype == from_dense.dtype
    assert_array_almost_equal(from_sparse.A, from_dense)
Exemple #4
0
 def polynomial_features_csr_X_zero_row(self, zero_row_index, deg,
                                        interaction_only):
     """Check that ExtendedFeatures matches PolynomialFeatures on an input
     containing one all-zero row (feature names and transformed values).

     BUG FIX: the original ended with ``self.assertEqual(Xt_dense1,
     Xt_dense2)``, which compares two multi-element NumPy arrays and raises
     ``ValueError: The truth value of an array with more than one element
     is ambiguous`` instead of performing an element-wise check. Use
     ``numpy.testing.assert_array_almost_equal`` instead.
     """
     from numpy.testing import assert_array_almost_equal

     X_csr = sparse_random(3, 10, 1.0, random_state=0).tocsr()
     X_csr[zero_row_index, :] = 0.0
     X = X_csr.toarray()
     est = ExtendedFeatures(poly_degree=deg,
                            poly_include_bias=False,
                            poly_interaction_only=interaction_only)
     est.fit(X)
     poly = PolynomialFeatures(degree=deg,
                               include_bias=False,
                               interaction_only=interaction_only)
     poly.fit(X)
     self.assertEqual(poly.get_feature_names(), est.get_feature_names())
     Xt_dense1 = est.fit_transform(X)
     Xt_dense2 = poly.fit_transform(X)
     # Element-wise comparison with tolerance; unittest's assertEqual
     # cannot compare multi-element arrays.
     assert_array_almost_equal(Xt_dense1, Xt_dense2)
Exemple #5
0
def test_pca():
    """Run PCA and TruncatedSVD models through the relay model test helper."""
    helper = SklearnTestHelper()

    # Plain PCA wrapped in a RobustPCA container.
    pca_model = PCA(n_components=2)
    wrapper = RobustPCA()
    points = np.array(
        [[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]],
        dtype=np.float32,
    )
    pca_model.fit(points)
    wrapper.robust_pca_ = pca_model
    shape = (relay.Any(), len(points[0]))
    _test_model_impl(helper, wrapper, shape, points)

    # TruncatedSVD fitted on a dense copy of a sparse random matrix.
    svd_model = TruncatedSVD(n_components=5, n_iter=7, random_state=42)
    points = sparse_random(
        100, 100, density=0.01, format="csr", dtype="float32", random_state=42
    ).toarray()
    svd_model.fit(points)
    wrapper.robust_pca_ = svd_model
    shape = (relay.Any(), len(points[0]))
    _test_model_impl(helper, wrapper, shape, points)
Exemple #6
0
    def _sparse_random_pattern(self):
        """Generate a sparse random pattern for each hidden layer and output
        class, L2-normalize each pattern column-wise, and append the result
        to ``self._patterns``.
        """
        # Densities come from config; a scalar value applies to every layer.
        density_list = config.pattern_sparse_random_density
        if not isinstance(density_list, list):
            density_list = [density_list] * self._patterns_to_generate

        for layer_idx in range(1, self._patterns_to_generate + 1):
            pattern = sparse_random(self._network.layer_size(layer_idx),
                                    self.num_patterns,
                                    density=density_list[layer_idx - 1])
            pattern = np.array(pattern.todense())
            # Scale each column to unit L2 norm, operating in place.
            pattern = normalize(pattern, norm='l2', axis=0, copy=False)
            self._patterns.append(pattern)
Exemple #7
0
    def do_random_transactions(self):
        """Simulate one round of random transactions between agents.

        Builds a sparse random matrix of transaction rates, scales it by each
        agent's random maximum rate and current balance, deducts random
        commissions from each transaction, then persists the resulting
        balances and the total commission collected.
        """
        # NOTE(review): ``assert`` is stripped under ``python -O``; consider
        # raising ValueError if this is genuine input validation.
        assert self.density > 0, "Transactions density is insignificant"
        balances = self._create_balances()
        agents_size = balances.shape[0]

        # Each agent sends at most a random fraction of its balance.
        random_max_transaction_rates = self.max_transaction_rate * np.random.rand(balances.shape[0])
        random_total_transaction_rates = sparse_random(agents_size, agents_size, density=self.density)
        # Row-normalize so each agent's outgoing rates sum to 1.
        normalized_random_total_transaction_rates = normalize(random_total_transaction_rates, norm='l1', axis=1)
        total_transaction_amounts = normalized_random_total_transaction_rates.T.multiply(random_max_transaction_rates * balances).T

        # Commission applies only where a transaction actually occurred.
        transactions_sended = (total_transaction_amounts > 0)
        random_comission_rates = self.max_comission_rate * self.create_similar_random_matrix(transactions_sended)
        comission_amounts = total_transaction_amounts.multiply(random_comission_rates)
        transaction_amounts = total_transaction_amounts - comission_amounts

        # Column sums = amounts received; row sums = amounts sent plus commission paid.
        income = transaction_amounts.sum(axis=0).A1
        outcome = transaction_amounts.sum(axis=1).A1 + comission_amounts.sum(axis=1).A1
        new_balances = balances + income - outcome
        total_comission = comission_amounts.sum()

        # BUG FIX: persist the post-transaction balances. The original stored
        # the stale pre-transaction ``balances`` and left ``new_balances``
        # unused, silently discarding the whole round's effect.
        self._set_balances(new_balances)
        self._set_total_comission(total_comission)
Exemple #8
0
# Randomize which network each node is assigned to.
random.shuffle(networks_list)

# Define what nodes belong to what network and what their color should be
node_network_map = dict(zip(nodes, networks_list))
colors = ['green', 'royalblue', 'red', 'orange', 'cyan']
color_map = dict(zip(networks, colors))

# Build the graph and bucket nodes by their assigned display color.
graph = nx.Graph()
graph.add_nodes_from(nodes)
nodes_by_color = {
    val: [node for node in graph if color_map[node_network_map[node]] == val]
    for val in colors
}

# Take random sparse matrix as adjacency matrix
# NOTE(review): assumes len(nodes) <= 30 so the indices fit the 30x30
# matrix — confirm against where ``nodes`` is built.
mat = sparse_random(30, 30, density=0.3).todense()
for row, row_val in enumerate(nodes):
    for col, col_val in enumerate(nodes):
        if col > row and mat[
                row,
                col] != 0.0:  # Stick to upper half triangle, mat is not symmetric
            graph.add_edge(row_val, col_val, weight=mat[row, col])

# Choose a layout to visualize graph
# pos = nx.spring_layout(graph)
pos = partition_layout(graph, node_network_map, ratio=0.15)
edges = graph.edges()
# Get the edge weights and normalize them
weights = [abs(graph[u][v]['weight']) for u, v in edges]
weights_n = [5 * float(i) / max(weights)
Exemple #9
0
        :param x:  X: array-like, shape (n_samples, n_components)
        :return: X_original: array, shape (n_samples, n_features)
        """
        return self.model.inverse_transform(X=x)

    def set_params(self, params):
        """Forward *params* (a dict of parameter names to values) to the
        wrapped model's ``set_params``.
        """
        estimator = self.model
        estimator.set_params(**params)

    def transform(self, X):
        """Apply the wrapped model's ``transform`` to *X* and return the result."""
        estimator = self.model
        return estimator.transform(X=X)

    def get_components(self):
        """Return the fitted model's components matrix."""
        return self.model.components_   # array, shape (n_components, n_features): the data's axes

    def get_explained_variance(self):   # variance of the training samples explained by each component's projection
        """Return the variance explained by each selected component."""
        return self.model.explained_variance_  # array, shape (n_components,)

    def get_explained_variance_ratio(self):   # percentage of variance explained by each selected component
        """Return the fraction of total variance explained by each component."""
        return self.model.explained_variance_ratio_    # array, shape (n_components,)

    def get_singular_values(self):  # singular value of each selected component
        """Return the singular values corresponding to each component."""
        return self.model.singular_values_ # array, shape (n_components,)


if __name__ == "__main__":
    # Smoke test: fit truncated SVD on a sparse random matrix and print
    # the explained-variance diagnostics.
    demo_matrix = sparse_random(100, 100, density=0.01, format="csr",
                                random_state=42)
    svd = TSVD(n_components=5, n_iter=7, random_state=42)
    svd.fit(demo_matrix)
    ratio = svd.get_explained_variance_ratio()
    print(ratio)
    print(ratio.sum())
    print(svd.get_singular_values())
Exemple #10
0
# Quick look at the first rows of the data.
dataset.head()

# Class distribution of the target variable.
dataset.Target.value_counts()

# Visualize the class distribution as a bar chart.
dataset.Target.value_counts().plot.bar()

# Separate the features from the label column.
X = dataset.drop("Target", axis=1)
y = dataset["Target"]

# Hold out 20% of the rows for testing; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1)

#
>>> from sklearn.decomposition import TruncatedSVD
>>> from scipy.sparse import random as sparse_random
>>> X = sparse_random(100, 100, density=0.01, format='csr',
...                   random_state=42)
>>> svd = TruncatedSVD(n_components=5, n_iter=7, random_state=42)
>>> svd.fit(X)
TruncatedSVD(n_components=5, n_iter=7, random_state=42)
>>> print(svd.explained_variance_ratio_)
>>> print(svd.explained_variance_ratio_.sum())
>>> print(svd.singular_values_)
#
>>> from sklearn.decomposition import PCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = PCA(n_components=2)
>>> pca.fit(X)
PCA(n_components=2)
>>> print(pca.explained_variance_ratio_)
>>> print(pca.singular_values_)
>>> pca = PCA(n_components=2, svd_solver='full')