Example 1
def test_ndarray_object_nesting():
    # Based on issue 53
    # With nested ndarrays
    before = zeros((
        2,
        2,
    ), dtype=object)
    for i in ndindex(before.shape):
        before[i] = array([1, 2, 3])
    after = loads(dumps(before))
    assert before.shape == after.shape, \
     'shape of array changed for nested ndarrays:\n{}'.format(dumps(before, indent=2))
    assert before.dtype == after.dtype
    assert array_equal(before[0, 0], after[0, 0])
    # With nested lists
    before = zeros((
        2,
        2,
    ), dtype=object)
    for i in ndindex(before.shape):
        before[i] = [1, 2, 3]
    after = loads(dumps(before))
    assert before.shape == after.shape, \
        'shape of array changed for nested lists:\n{}'.format(dumps(before, indent=2))
    assert before.dtype == after.dtype
    assert array_equal(before[0, 0], after[0, 0])
Example 2
def test_memory_order():
    arrC = array([[1., 2.], [3., 4.]], order='C')
    json = dumps(arrC)
    arr = loads(json)
    assert array_equal(arrC, arr)
    assert arrC.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
     arrC.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
    arrF = array([[1., 2.], [3., 4.]], order='F')
    json = dumps(arrF)
    arr = loads(json)
    assert array_equal(arrF, arr)
    assert arrF.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
     arrF.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
Example 3
def test_memory_order():
	arrC = array([[1., 2.], [3., 4.]], order='C')
	json = dumps(arrC)
	arr = loads(json)
	assert array_equal(arrC, arr)
	assert arrC.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
		arrC.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
	arrF = array([[1., 2.], [3., 4.]], order='F')
	json = dumps(arrF)
	arr = loads(json)
	assert array_equal(arrF, arr)
	assert arrF.flags['C_CONTIGUOUS'] == arr.flags['C_CONTIGUOUS'] and \
		arrF.flags['F_CONTIGUOUS'] == arr.flags['F_CONTIGUOUS']
Example 4
def test_dump_np_scalars():
    data = [
        int8(-27),
        complex64(exp(1) + 37j),
        (
            {
                'alpha': float64(-exp(10)),
                'str-only': complex64(-1 - 1j),
            },
            uint32(123456789),
            float16(exp(-1)),
            set((
                int64(37),
                uint64(-0),
            )),
        ),
    ]
    replaced = encode_scalars_inplace(deepcopy(data))
    json = dumps(replaced)
    rec = loads(json)
    assert data[0] == rec[0]
    assert data[1] == rec[1]
    assert data[2][0] == rec[2][0]
    assert data[2][1] == rec[2][1]
    assert data[2][2] == rec[2][2]
    assert data[2][3] == rec[2][3]
    assert data[2] == tuple(rec[2])
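Note: `encode_scalars_inplace` is needed because numpy scalars nested inside plain containers are not converted by `dumps` on their own. A minimal round-trip sketch, assuming the `json_tricks.np_utils` import path from the json_tricks README (it may differ between versions):

from copy import deepcopy
from numpy import float16, uint32
from json_tricks import dumps, loads
from json_tricks.np_utils import encode_scalars_inplace

# Wrap numpy scalars in place so the standard encoder can serialize them.
payload = {'rate': float16(0.5), 'count': uint32(7)}
wrapped = encode_scalars_inplace(deepcopy(payload))
recovered = loads(dumps(wrapped))
assert recovered['count'] == 7 and recovered['rate'] == 0.5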
Example 5
def train(genre, json, n_beats=16, threshold=0.1):
    db = TinyDB(json)
    total = len(db)

    features = []
    kps = []
    shapes = []

    printProgressBar(0, total, prefix='Progress:', suffix='Complete', length=50)
    for i, item in enumerate(db):
        song = jt.loads(item[str(i)], cls_lookup_map=globals())

        features.append(song.features)
        kps.append(song.features.kp)
        shapes.append(song.features.kp.shape)

        printProgressBar(i + 1,
                         total,
                         prefix='Progress:',
                         suffix='Complete',
                         length=50)

    # Resize each keypoint array to the average length so they can be stacked below.
    avg_shape = int(sum(p[0] for p in shapes) / len(shapes))
    resize_kps = []
    for a, s in zip(kps, shapes):
        ratio = avg_shape / s[0]
        if abs(1 - ratio) > threshold:
            continue
        try:
            resize_kps.append(imresize(a, (avg_shape, 2), interp="nearest"))
        except ValueError:
            continue

    print("Displaying {}/{}".format(len(resize_kps), len(kps)))
    kde(vstack(resize_kps))
Example 6
def read_imp_JSON(filename):
    """read in a imp JSON file
   Read gridded interpolated magnetic perturbations (IMPs) from a specially
   formatted JSON file.
   """
    with open(filename, 'r') as fh:
        data = json_t.loads(fh.read())

    Epoch = data['Epoch']
    Latitude = data['Latitude']
    Longitude = data['Longitude']
    Radius = data['Radius']
    X = data['X']
    Y = data['Y']
    Z = data['Z']
    Label = data['Label']
    ObsLat = data['ObsLat']
    ObsLon = data['ObsLon']
    ObsRad = data['ObsRad']
    ObsX = data['ObsX']
    ObsY = data['ObsY']
    ObsZ = data['ObsZ']
    ObsFit = data['ObsFit']
    ObsName = data['ObsName']

    return (Epoch, (Latitude, Longitude, Radius), X, Y, Z, Label,
            (ObsLat, ObsLon, ObsRad), ObsX, ObsY, ObsZ, ObsFit, ObsName)
Example 7
def test_dump_np_scalars():
	data = [
		int8(-27),
		complex64(exp(1)+37j),
		(
			{
				'alpha': float64(-exp(10)),
				'str-only': complex64(-1-1j),
			},
			uint32(123456789),
			float16(exp(-1)),
			{
				int64(37),
				uint64(-0),
			},
		),
	]
	replaced = encode_scalars_inplace(deepcopy(data))
	json = dumps(replaced)
	rec = loads(json)
	print(data)
	print(rec)
	assert data[0] == rec[0]
	assert data[1] == rec[1]
	assert data[2][0] == rec[2][0]
	assert data[2][1] == rec[2][1]
	assert data[2][2] == rec[2][2]
	assert data[2][3] == rec[2][3]
	assert data[2] == tuple(rec[2])
Example 8
def test_decode_compact_mixed_compactness():
    json = '[{"__ndarray__": "b64:AAAAAAAA8D8AAAAAAAAAQAAAAAAAAAhAAAAAAAAAEEAAAAAAAAA' \
     'UQAAAAAAAABhAAAAAAAAAHEAAAAAAAAAgQA==", "dtype": "float64", "shape": [2, 4], "Corder": ' \
     'true}, {"__ndarray__": [3.141592653589793, 2.718281828459045], "dtype": "float64", "shape": [2]}]'
    data = loads(json)
    assert_equal(data[0], array([[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]]))
    assert_equal(data[1], array([pi, exp(1)]))
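The JSON above mixes one compact ("b64:") array with one plain-list array. A sketch of producing such mixed output, assuming your json_tricks version accepts an integer for `ndarray_compact` (compact notation only for arrays with at least that many elements):

big = array([[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]])  # 8 elements: compact
small = array([pi, exp(1)])                                 # 2 elements: plain list
json = dumps([big, small], properties={'ndarray_compact': 5})
assert '"b64:' in json and '3.141592653589793' in json
assert_equal(loads(json), [big, small])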
Example 9
def test_decode_compact_inline_compression():
    json = '[{"__ndarray__": "b64.gz:H4sIAAAAAAAC/2NgAIEP9gwQ4AChOKC0AJQWgdISUFoGSitAaSUorQKl1aC0BpTWgtI6UFoPShs4AABmfqWAgAAAAA==", "dtype": "float64", "shape": [4, 4], "Corder": true}]'
    data = loads(json)
    assert_equal(
        data[0],
        array([[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0],
               [9.0, 10.0, 11.0, 12.0], [13.0, 14.0, 15.0, 16.0]]))
Example 10
def hjrequest(client, url, rkey, params=None, typ='get', retdf=False):
    # Avoid a mutable default argument: a shared dict would leak state between calls.
    if params is None:
        params = {}

    if retdf:
        params['asDf'] = True

    if typ == 'get':
        response = client.get(url,
                              data=json.dumps(params),
                              content_type='application/json')
    elif typ == 'post':
        response = client.post(url,
                               data=json.dumps(params),
                               content_type='application/json')
    else:
        raise ValueError("typ must be 'get' or 'post'")

    responseDict = json.loads(response.get_data().decode('utf-8'))

    code = response.status_code
    message = responseDict['message']
    data = responseDict[rkey]
    if retdf:
        if data:
            data = loads(data, preserve_order=True)
            if data['arr'].any():  # Only build a DataFrame if the array is not empty.
                data = pd.DataFrame(data['arr'], columns=data['columns'])
            else:
                data = []

    return code, message, data
Example 11
def test_scalars_types():
	# from: https://docs.scipy.org/doc/numpy/user/basics.types.html
	encme = []
	for dtype in DTYPES:
		for val in (dtype(0),) + get_lims(dtype):
			assert isinstance(val, dtype)
			encme.append(val)
	json = dumps(encme, indent=2)
	rec = loads(json)
	assert encme == rec
Example 12
def test_mixed_cls_arr():
	json = dumps(mixed_data)
	back = dict(loads(json))
	assert mixed_data.keys() == back.keys()
	assert (mixed_data['vec'] == back['vec']).all()
	assert (mixed_data['inst'].vec == back['inst'].vec).all()
	assert (mixed_data['inst'].nr == back['inst'].nr)
	assert (mixed_data['inst'].li == back['inst'].li)
	assert (mixed_data['inst'].inst.s == back['inst'].inst.s)
	assert (mixed_data['inst'].inst.dct == dict(back['inst'].inst.dct))
Example 13
def test_array_types():
    # from: https://docs.scipy.org/doc/numpy/user/basics.types.html
    # see also `test_scalars_types`
    for dtype in DTYPES:
        vec = [array((dtype(0), dtype(exp(1))) + get_lims(dtype), dtype=dtype)]
        json = dumps(vec)
        assert dtype.__name__ in json
        rec = loads(json)
        assert rec[0].dtype == dtype
        assert array_equal(vec, rec)
Example 14
def test_scalars_types():
    # from: https://docs.scipy.org/doc/numpy/user/basics.types.html
    encme = []
    for dtype in DTYPES:
        for val in (dtype(0), ) + get_lims(dtype):
            assert isinstance(val, dtype)
            encme.append(val)
    json = dumps(encme, indent=2)
    rec = loads(json)
    assert encme == rec
Example 15
def test_array_types():
	# from: https://docs.scipy.org/doc/numpy/user/basics.types.html
	# see also `test_scalars_types`
	for dtype in DTYPES:
		vec = [array((dtype(0), dtype(exp(1))) + get_lims(dtype), dtype=dtype)]
		json = dumps(vec)
		assert dtype.__name__ in json
		rec = loads(json)
		assert rec[0].dtype == dtype
		assert array_equal(vec, rec)
Example 16
def test_primitives():
    txt = dumps(deepcopy(npdata), primitives=True)
    data2 = loads(txt)
    assert isinstance(data2['vector'], list)
    assert isinstance(data2['matrix'], list)
    assert isinstance(data2['matrix'][0], list)
    assert data2['vector'] == npdata['vector'].tolist()
    assert (abs(array(data2['vector']) - npdata['vector'])).sum() < 1e-10
    assert data2['matrix'] == npdata['matrix'].tolist()
    assert (abs(array(data2['matrix']) - npdata['matrix'])).sum() < 1e-10
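For contrast, a short illustrative sketch of what `primitives=True` gives up: the plain-list output round-trips as a `list`, while the default mode restores the ndarray and its dtype:

vec = array([1.0, 2.0, 3.0])
assert loads(dumps(vec)).dtype == vec.dtype  # default mode restores the ndarray
assert loads(dumps(vec, primitives=True)) == [1.0, 2.0, 3.0]  # primitives mode yields a plain list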
Example 17
def test_mixed_cls_arr():
    json = dumps(mixed_data)
    back = dict(loads(json))
    assert mixed_data.keys() == back.keys()
    assert (mixed_data['vec'] == back['vec']).all()
    assert (mixed_data['inst'].vec == back['inst'].vec).all()
    assert (mixed_data['inst'].nr == back['inst'].nr)
    assert (mixed_data['inst'].li == back['inst'].li)
    assert (mixed_data['inst'].inst.s == back['inst'].inst.s)
    assert (mixed_data['inst'].inst.dct == dict(back['inst'].inst.dct))
Example 18
def test_empty():
    # issue https://github.com/mverleg/pyjson_tricks/issues/76
    datas = [
        zeros(shape=(1, 0)),
        zeros(shape=(0, 1)),
        zeros(shape=(0, 0)),
    ]
    for data in datas:
        json = dumps(data)
        assert_equal(loads(json), data,
                     'shape = {} ; json = {}'.format(data.shape, json))
Example 19
def load_from_json(f):
    'Load a JSON text file (f) and translate it into a dictionary with numpy elements, as defined by the JSON typing.'
    from json_tricks.np import loads
    with open(f,'r') as handle:
        d = dict(loads(handle.read()))
    return d
Example 20
def test_dumps_loads_numpy():
    json = dumps(deepcopy(npdata))
    data2 = loads(json)
    _numpy_equality(data2)
Example 21
def load_from_json(f):
    'Load a JSON text file (f) and translate it into a dictionary with numpy elements, as defined by the JSON typing.'
    from json_tricks.np import loads
    with open(f, 'r') as handle:
        d = dict(loads(handle.read()))
    return d
Example 22
def test_decode_compact_no_inline_compression():
    json = '[{"__ndarray__": "b64:AAAAAAAA8D8AAAAAAAAAQAAAAAAAAAhAAAAAAAAAEEA=", ' \
     '"dtype": "float64", "shape": [2, 2], "Corder": true}]'
    data = loads(json)
    assert_equal(data[0], array([[1.0, 2.0], [3.0, 4.0]]))
Example 23
def from_json(input_str):
    try:  # file path given
        with open(input_str) as fh:
            return json.load(fh, preserve_order=False)
    except IOError:  # JSON string given
        return json.loads(input_str, preserve_order=False)
Example 24
def test_compact():
    data = [array(list(2**(x + 0.5) for x in range(-30, +31)))]
    json = dumps(data, compression=True, properties={'ndarray_compact': True})
    back = loads(json)
    assert_equal(data, back)
Example 25
def test_dtype_object():
    # Based on issue 64
    arr = array(['a', 'b', 'c'], dtype=object)
    json = dumps(arr)
    back = loads(json)
    assert array_equal(back, arr)
Example 26
                        dest='verbose',
                        default=False,
                        help='verbose logs')

    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    # get node service
    server_address = args.dvidAddress
    node_service = DVIDNodeService(server_address, args.uuid)

    keyvalue_store = "config"
    settings = json.loads(node_service.get(keyvalue_store, "imageInfo"))

    shape = settings['shape']
    time_range = settings['time_range']
    if args.timeRange is not None:
        time_range = (max(time_range[0], args.timeRange[0]),
                      min(time_range[1], args.timeRange[1]))

    logging.info('Downloading time range {} from {}, shape {}'.format(
        time_range, server_address, shape))

    raw_data = np.zeros(
        (time_range[1] - time_range[0], shape[0], shape[1], shape[2]))

    # download all frames
    with h5py.File(args.ilpFilename, 'w') as seg_h5:
Example 27
def test_dumps_loads_numpy():
	json = dumps(npdata)
	data2 = loads(json)
	_numpy_equality(data2)