Example #1
def query_compare():
    # Compare the data returned by the legacy fmanager query with the data
    # stored in the HDF5 database. test_factor, start_time, first_end_time,
    # second_end_time, initial_size and db_path are defined at module level
    # in the original test script (not shown in this excerpt).
    old_db_data = query(test_factor, (start_time, second_end_time))
    threshold_loc = old_db_data.index.get_loc(first_end_time) + 1

    new_db_data = HDF5Engine.query(
        ParamsParser.from_dict(db_path, {
            'rel_path': 'test',
            'start_time': start_time,
            'end_time': second_end_time,
            'store_fmt': (DataClassification.STRUCTURED,
                          DataValueCategory.NUMERIC,
                          DataFormatCategory.PANEL)
        }))
    old_db_data = old_db_data.fillna(0)
    new_db_data = new_db_data.fillna(0)
    columns1 = new_db_data.columns[:initial_size]
    columns2 = new_db_data.columns
    # DataFrame.ix is deprecated (removed in recent pandas); slice rows
    # positionally with iloc and select columns by label with loc instead.
    is_close1 = np.isclose(old_db_data.iloc[:threshold_loc].loc[:, columns1],
                           new_db_data.iloc[:threshold_loc].loc[:, columns1])
    is_close2 = np.isclose(old_db_data.iloc[threshold_loc:].loc[:, columns2],
                           new_db_data.iloc[threshold_loc:])
    print(np.all(is_close1))
    print(np.all(is_close2))
Example #2
def second_insert():
    data = query(test_factor, (start_time, second_end_time))
    HDF5Engine.insert(
        data,
        ParamsParser.from_dict(db_path, {
            'rel_path': rel_path,
            'store_fmt': (DataClassification.STRUCTURED,
                          DataValueCategory.NUMERIC,
                          DataFormatCategory.PANEL),
            'dtype': np_dtype('float64')
        }))
Example #3
def first_insert():
    # First insertion: only the first initial_size columns, with the column
    # order shuffled (presumably to exercise column alignment on insert).
    data = query(test_factor,
                 (start_time, first_end_time)).iloc[:, :initial_size]
    columns = list(data.columns)
    shuffle(columns)
    data = data.loc[:, columns]
    HDF5Engine.insert(
        data,
        ParamsParser.from_dict(db_path, {
            'rel_path': rel_path,
            'store_fmt': (DataClassification.STRUCTURED,
                          DataValueCategory.NUMERIC,
                          DataFormatCategory.PANEL),
            'dtype': np_dtype('float64')
        }))
Example #4
from numpy import dtype as np_dtype

from database.hdf5Engine.dbcore import HDF5Engine
from database.const import DataFormatCategory, DataValueCategory, DataClassification
from database.db import ParamsParser
from fmanager import query

TEST_FACTOR = 'CLOSE'
start_time = '2017-01-01'
end_time = '2018-01-15'
new_end = '2018-02-01'

sample_df = query(TEST_FACTOR, (start_time, end_time))
new_data = query(TEST_FACTOR, (end_time, new_end))

db_path = r'C:\Users\c\Desktop\test'
# file_path = join(db_path, 'test.h5')
# if exists(file_path):
#     remove(file_path)
HDF5Engine.insert(
    new_data,
    ParamsParser.from_dict(db_path, {
        'rel_path': 'test',
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.PANEL),
        'dtype': np_dtype('float64')
    }))
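A quick read-back of the freshly inserted slice can confirm the write; a minimal sketch (not part of the original snippet), reusing the query parameters shown in Example #1:

# Hypothetical check: query the inserted window back from the HDF5 store.
inserted = HDF5Engine.query(
    ParamsParser.from_dict(db_path, {
        'rel_path': 'test',
        'start_time': end_time,
        'end_time': new_end,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.PANEL)
    }))
print(inserted.shape, new_data.shape)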
Example #5
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author:  Hao Li
Email: [email protected]
Github: https://github.com/SAmmer0
Created: 2018/2/27
"""

from database.const import DataClassification, DataFormatCategory, DataValueCategory
from database.jsonEngine.dbcore import JSONEngine
from database.db import ParamsParser

db_path = r'C:\Users\c\Desktop\test'
json_db = 'sjson_test'

print(
    JSONEngine.remove_data(
        ParamsParser.from_dict(db_path, {
            'rel_path': json_db,
            'store_fmt': (DataClassification.STRUCTURED,
                          DataValueCategory.CHAR,
                          DataFormatCategory.TIME_SERIES)
        })))
Example #6
# Imports needed by this snippet (omitted in the original excerpt).
from os.path import exists, join
from shutil import rmtree
from time import time

from database.const import DataClassification, DataFormatCategory, DataValueCategory
from database.jsonEngine.dbcore import JSONEngine
from database.db import ParamsParser
from fmanager import query

query_start_time = '2017-05-01'
query_end_time = '2017-12-04'

# sample_start_time and sample_end_time are defined earlier in the original
# script (not shown in this excerpt).
sample_data = query('ZX_IND', (sample_start_time, sample_end_time))

db_path = r'C:\Users\c\Desktop\test'
json_db = 'df_query_test'
folder_path = join(db_path, json_db)
if exists(folder_path):
    rmtree(folder_path)
JSONEngine.insert(
    sample_data,
    ParamsParser.from_dict(db_path, {
        'rel_path': json_db,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.CHAR,
                      DataFormatCategory.PANEL)
    }))

# Time the legacy fmanager query...
stime = time()
old_db_data = query('ZX_IND', (query_start_time, query_end_time))
etime = time()
print(etime - stime)

# ...and the same read from the JSON store. The excerpt is truncated inside
# this call; it is closed here with the store_fmt used for insertion above.
stime = time()
jsondb_data = JSONEngine.query(
    ParamsParser.from_dict(db_path, {
        'rel_path': json_db,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.CHAR,
                      DataFormatCategory.PANEL)
    }))
etime = time()
print(etime - stime)
Example #7
# -*- coding: utf-8 -*-

from database.hdf5Engine.dbcore import HDF5Engine
from database.db import ParamsParser
from database.const import DataClassification, DataValueCategory, DataFormatCategory

db_path = r'C:\Users\c\Desktop\test'
rel_path = 'test_remove'

result = HDF5Engine.remove_data(
    ParamsParser.from_dict(db_path, {
        'rel_path': rel_path,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.PANEL)
    }))
print(result)
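For the removal to have something to act on, data must first be stored under test_remove. A minimal setup sketch (not part of the original snippet), reusing the insert call and the CLOSE fixture from the neighbouring examples:

from numpy import dtype as np_dtype
from fmanager import query

# Hypothetical setup: populate 'test_remove' before running the removal above.
sample = query('CLOSE', ('2017-01-01', '2018-01-01'))
HDF5Engine.insert(
    sample,
    ParamsParser.from_dict(db_path, {
        'rel_path': rel_path,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.PANEL),
        'dtype': np_dtype('float64')
    }))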
Example #8
# @Version : $Id$

import numpy as np

from database.hdf5Engine.dbcore import HDF5Engine
from database.db import ParamsParser
from database.const import DataClassification, DataValueCategory, DataFormatCategory
from fmanager import query

start_time = '2017-01-01'
end_time = '2018-01-01'

db_path = r'C:\Users\c\Desktop\test'
data = HDF5Engine.query(
    ParamsParser.from_dict(db_path, {
        'rel_path': 'test_series',
        'start_time': start_time,
        'end_time': end_time,
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.TIME_SERIES)
    }))
fm_data = query('CLOSE', (start_time, end_time)).iloc[:, 0]
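# NaN never compares equal to NaN, so fill missing values with a sentinel
# before the element-wise comparison below.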
data = data.fillna(-10000)
fm_data = fm_data.fillna(-10000)
print(np.all(data == fm_data))
Example #9
# -*- coding: utf-8 -*-

from database.hdf5Engine.dbcore import HDF5Engine
from database.const import DataClassification, DataFormatCategory, DataValueCategory
from database.db import ParamsParser

db_path = r'C:\Users\c\Desktop\test'
src_path = 'test_move_src'
dest_path = 'test_move_dest'
src_params = ParamsParser.from_dict(db_path, {
    'rel_path': src_path,
    'store_fmt': (DataClassification.STRUCTURED,
                  DataValueCategory.NUMERIC,
                  DataFormatCategory.PANEL)
})
dest_params = ParamsParser.from_dict(db_path, {
    'rel_path': dest_path,
    'store_fmt': (DataClassification.STRUCTURED,
                  DataValueCategory.NUMERIC,
                  DataFormatCategory.PANEL)
})
result = HDF5Engine.move_to(src_params, dest_params)
print(result)
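A quick way to confirm the move is to read the destination back with the query call used in the other HDF5 examples; a minimal sketch (not part of the original snippet), with an assumed date range:

# Hypothetical check: the data should now be readable from the destination path.
moved = HDF5Engine.query(
    ParamsParser.from_dict(db_path, {
        'rel_path': dest_path,
        'start_time': '2017-01-01',   # assumed range, mirroring the other examples
        'end_time': '2018-01-01',
        'store_fmt': (DataClassification.STRUCTURED,
                      DataValueCategory.NUMERIC,
                      DataFormatCategory.PANEL)
    }))
print(moved.shape)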
Example #10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author:  Hao Li
Email: [email protected]
Github: https://github.com/SAmmer0
Created: 2018/3/14
"""

from database.db import ParamsParser
from database.const import DataClassification
from database.pickleEngine.dbcore import PickleEngine

db_path = r'C:\Users\c\Desktop\test\pickle_test'

params = ParamsParser.from_dict(db_path, {'rel_path': 'test',
                                          'store_fmt': (DataClassification.UNSTRUCTURED, )})

print(PickleEngine.query(params))
Example #11
# Imports needed by this snippet (omitted in the original excerpt).
from os.path import exists, join
from shutil import rmtree

from database.const import DataClassification, DataFormatCategory, DataValueCategory
from database.jsonEngine.dbcore import JSONEngine
from database.db import ParamsParser
from fmanager import query

OVERLAP_INSERT_FLAG = False   # whether the second batch overlaps the first
TEST_SECOND_FLAG = True    # whether to perform a second insert

first_start = '2017-01-01'
first_end = '2017-06-01'

if OVERLAP_INSERT_FLAG:
    second_start = '2017-04-01'
    second_end = '2018-02-01'
else:
    second_start = '2017-06-02'
    second_end = '2018-02-01'

first_sample = query('ZX_IND', (first_start, first_end)).iloc[:, 0]
second_sample = query('ZX_IND', (second_start, second_end)).iloc[:, 0]

db_path = r'C:\Users\c\Desktop\test'
json_db = 'sjson_test'
folder_path = join(db_path, json_db)
if exists(folder_path):
    rmtree(folder_path)

store_fmt = (DataClassification.STRUCTURED, DataValueCategory.CHAR,
             DataFormatCategory.TIME_SERIES)
print(JSONEngine.insert(
    first_sample,
    ParamsParser.from_dict(db_path, {'rel_path': json_db, 'store_fmt': store_fmt})))
if TEST_SECOND_FLAG:
    print(JSONEngine.insert(
        second_sample,
        ParamsParser.from_dict(db_path, {'rel_path': json_db, 'store_fmt': store_fmt})))
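To check that the two batches ended up in the same series, the store can be read back; a minimal sketch (not part of the original snippet), assuming JSONEngine.query accepts the same parameter dict as insert:

# Hypothetical read-back of the merged time series.
merged = JSONEngine.query(
    ParamsParser.from_dict(db_path, {'rel_path': json_db, 'store_fmt': store_fmt}))
print(merged.index.min(), merged.index.max())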