def test_search_number_data():
    """A numeric '>' search should gray out every node that fails the filter."""
    points = np.array([
        [0., 0.], [0.1, 0.1], [0.2, 0.2],
        [0.2, 0.8], [0.1, 0.9], [0., 1.],
        [0.8, 0.8], [0.9, 0.9], [1., 1.],
    ])
    labels = np.array([[0], [0], [0], [1], [1], [1], [2], [2], [2]])
    texts = np.array([
        ["a"], ["a"], ["a"],
        ["b"], ["b"], ["b"],
        ["c"], ["c"], ["c"],
    ])
    topo = Topology()
    topo.load_data(points, text_data=texts)
    topo.fit_transform(metric=None, lens=None)
    topo.map(resolution=2, overlap=0.3)
    topo.color(labels, color_method="mean", color_type="rgb", normalize=True)
    query = [{
        "data_type": "number",
        "operator": ">",
        "column": 0,
        "value": 0.7,
    }]
    topo.search_from_values(search_dicts=query, target=None, search_type="and")
    # Only the cluster whose first column exceeds 0.7 keeps its color.
    assert topo.hex_colors == ['#cccccc', '#cccccc', '#b20000']
def test_map():
    """Mapper on a 2x2 cover should produce four cluster nodes, fully connected."""
    points = np.array([
        [0., 0.], [0.25, 0.25], [0.5, 0.5],
        [0.75, 0.75], [1., 1.], [1., 0.],
        [0.25, 0.75], [0.75, 0.25], [0., 1.],
    ])
    topo = Topology()
    topo.load_data(points)
    topo.fit_transform(metric=None, lens=None)
    topo.map(resolution=2, overlap=0.3, eps=0.3, min_samples=3)
    expected_nodes = np.array([
        [0.25, 0.25], [0.25, 0.75],
        [0.75, 0.25], [0.75, 0.75],
    ])
    expected_edges = np.array([
        [0, 1], [0, 2], [0, 3],
        [1, 2], [1, 3], [2, 3],
    ])
    assert_array_equal(topo.nodes, expected_nodes)
    assert_array_equal(topo.edges, expected_edges)
def test_color_mean_gray():
    """Mean-coloring with the gray palette maps per-node target means to hex grays."""
    points = np.array([
        [0., 0.], [0.1, 0.1], [0.2, 0.2],
        [0.2, 0.8], [0.1, 0.9], [0., 1.],
        [0.8, 0.8], [0.9, 0.9], [1., 1.],
    ])
    labels = np.array([[0], [0], [0], [1], [1.1], [0.9], [2], [2], [2]])
    topo = Topology()
    topo.load_data(points)
    topo.fit_transform(metric=None, lens=None)
    topo.map(resolution=2, overlap=0.3, eps=0.2, min_samples=3)
    topo.color(labels, color_method="mean", color_type="gray", normalize=True)
    assert topo.hex_colors == ['#dcdcdc', '#787878', '#141414']
def test_transform_multi_lens():
    """Stacking two lenses yields one projected column per lens."""
    topo = Topology()
    topo.load_data(np.array([[0., 0.], [0., 1.], [1., 1.]]))
    topo.fit_transform(
        metric="hamming",
        lens=[L1Centrality(), GaussianDensity(h=0.25)],
    )
    expected = np.array([[1., 0.], [0., 1.], [1., 0.]])
    assert_array_equal(topo.point_cloud, expected)
def test_transform_none_none():
    """With no metric and no lens, the point cloud is the input data unchanged."""
    topo = Topology()
    topo.load_data(np.array([[0., 0.], [1., 1.]]))
    topo.fit_transform(metric=None, lens=None)
    assert_array_equal(topo.point_cloud, np.array([[0., 0.], [1., 1.]]))
def test_transform_none_pca():
    """A single-component PCA lens projects the data onto one column."""
    topo = Topology()
    topo.load_data(np.array([[0., 1.], [1., 0.]]))
    topo.fit_transform(metric=None, lens=[PCA(components=[0])])
    expected = np.array([0., 1.]).reshape(-1, 1)
    assert_array_equal(topo.point_cloud, expected)
def test_map_min_samples_under_zero():
    """A negative min_samples must be rejected by map()."""
    points = np.array([
        [0., 0.], [0.25, 0.25], [0.5, 0.5],
        [0.75, 0.75], [1., 1.], [1., 0.],
        [0.25, 0.75], [0.75, 0.25], [0., 1.],
    ])
    topo = Topology()
    topo.load_data(points)
    topo.fit_transform(metric=None, lens=None)
    with pytest.raises(Exception):
        topo.map(min_samples=-1)
def test_color_different_size_input():
    """A target whose length differs from the data must be rejected by color()."""
    points = np.array([
        [0., 0.], [0.1, 0.1], [0.2, 0.2],
        [0.2, 0.8], [0.1, 0.9], [0., 1.],
        [0.8, 0.8], [0.9, 0.9], [1., 1.],
    ])
    bad_target = np.array([0, 1, 2])  # 3 values for 9 data points
    topo = Topology()
    topo.load_data(points)
    topo.fit_transform()
    topo.map()
    with pytest.raises(Exception):
        topo.color(bad_target)
def test_color_ctype():
    """An unknown color_type must be rejected by color()."""
    points = np.array([
        [0., 0.], [0.1, 0.1], [0.2, 0.2],
        [0.2, 0.8], [0.1, 0.9], [0., 1.],
        [0.8, 0.8], [0.9, 0.9], [1., 1.],
    ])
    labels = np.array([[0], [0], [0], [1], [1], [1], [2], [2], [2]])
    topo = Topology()
    topo.load_data(points)
    topo.fit_transform()
    topo.map(resolution=2, overlap=0.3)
    with pytest.raises(Exception):
        topo.color(labels, color_type="hoge")
def reduction():
    """Project an uploaded CSV through the requested reduction lens.

    Reads ``file_id``, ``target_index`` and ``algorithm`` from the request
    parameters, loads and standardizes the CSV, optionally splits off the
    target column, runs the selected reduction from ``REDUCTIONS``, and
    returns the resulting 2-D point cloud as a cacheable JSON response.
    On any failure an ``{"error_msg": ...}`` body is returned instead.
    """
    try:
        # get request params
        file_id = request.params.file_id
        target_index = request.params.target_index
        algorithm = int(request.params.algorithm)

        # resolve the stored file for this upload id
        file_name = _get_file_name_from_id(file_id)
        file_path = os.path.join(DATA_DIR, file_name)

        # create topology instance and load standardized data
        topology = Topology(verbose=0)
        loader = CSVLoader(file_path)
        topology.load(loader=loader, standardize=True)

        # If no target index was supplied, all columns are used as-is;
        # otherwise split the target column out of the numeric data.
        if target_index != '':
            topology.number_data, target = _split_target(
                topology.number_data, int(target_index))

        # Scale into (0.05, 0.95) so projected points stay inside the
        # plot margins on the client.
        scaler = preprocessing.MinMaxScaler(feature_range=(0.05, 0.95))
        topology.fit_transform(lens=[REDUCTIONS[algorithm]], scaler=scaler)

        body = {
            "point_cloud": _ndarray_to_list(topology.point_cloud),
        }
        r = create_response(body)
        r.set_header('Cache-Control', 'max-age=86400')
        return r
    except Exception as e:
        # Bug fix: the previous e.args[0] raised IndexError for exceptions
        # constructed without arguments, crashing the error handler itself.
        # str(e) is safe for every exception.
        body = json.dumps({"error_msg": str(e)})
        r = create_response(body)
        return r
def test_transform_data_none():
    """fit_transform() without loaded data must raise."""
    topo = Topology()
    with pytest.raises(Exception):
        topo.fit_transform()