Example #1
0
def vec_generator(socket, data_type, buffer, nullMap):
    """
    Generate a numpy array from an xxdb vector read off the wire.

    :param socket: TCP socket to read from
    :param data_type: xxdb data type code of the vector's elements
    :param buffer: receive buffer passed through to the unpackers
    :param nullMap: mapping from xxdb type codes to the Python value used
        for that type's null sentinel
    :return: numpy array holding the vector's contents
    """
    # The vector header carries row and column counts; the element count
    # is their product.
    row = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    col = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    size = row * col
    if data_type in (DT_SYMBOL, DT_STRING):
        # Strings are length-delimited, so they must be read one at a
        # time; dtype=object keeps numpy from truncating to a fixed width.
        # (xrange was Python-2-only; range works on both 2 and 3.)
        vc = [read_string(socket, buffer) for _ in range(size)]
        return np.array(vc, dtype=object)
    else:
        # Fixed-width types can be bulk-unpacked in one shot.
        return np.array(list(DATA_UNPACKER[data_type](socket, size, buffer, nullMap)))
Example #2
0
def table_generator(socket, buffer, nullMap):
    """
    Generate a pandas DataFrame from an xxdb table object on the wire.

    :param socket: TCP socket to read from
    :param buffer: receive buffer passed through to the unpackers
    :param nullMap: mapping from xxdb type codes to the Python value used
        for that type's null sentinel
    :return: pandas DataFrame; the normalized xxdb column types are
        recorded on the frame as the ``__2xdbColumnTypes__`` attribute
    :raises Exception: if any column's data form is not a vector
    """
    # The header bytes must be consumed even though `rows` is not used
    # afterwards - each unpacker call advances the socket stream.
    rows = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    cols = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    tableName = read_string(socket, buffer)

    # Read the column names.
    colNameDict = dict()
    colNames = []
    for i in range(cols):
        name = read_string(socket, buffer)
        colNameDict[name] = len(colNameDict)
        colNames.append(name)

    # Read the columns and populate the data frame.
    df = pd.DataFrame()
    with warnings.catch_warnings():
        # Setting a non-standard attribute on a DataFrame triggers a
        # UserWarning; silence it deliberately.
        warnings.simplefilter("ignore")
        df.__2xdbColumnTypes__ = dict()
    for col in colNames:
        data_form, data_type = get_form_type(socket, buffer, nullMap)

        if data_form != DF_VECTOR:
            # BUGFIX: the original message lacked a space after the column
            # name ("column pricein table ...").
            raise Exception("column " + col + " in table " + tableName +
                            " must be a vector!")

        df[col] = TABLE_GEN_HANDLER.get(data_type,
                                        _default_handler)(data_type, socket,
                                                          buffer, nullMap)

        # Normalize narrow/aliased xxdb types to the canonical type that
        # actually backs the pandas column.
        if data_type in [DT_BYTE, DT_SHORT]:
            data_type = DT_INT
        if data_type in [DT_SYMBOL]:
            data_type = DT_STRING
        if data_type in [DT_FLOAT]:
            data_type = DT_DOUBLE
        if 6 <= data_type <= 14:
            # TODO: improve datetime transmission
            data_type = DT_DATETIME64
        df.__2xdbColumnTypes__[col] = data_type

    return df
Example #3
0
def table_str_col_generator(socket, buffer, nullMap):
    """
    Read a string column of an xxdb table into a numpy array.

    :param socket: TCP socket to read from
    :param buffer: receive buffer passed through to the unpackers
    :param nullMap: mapping from xxdb type codes to the Python value used
        for that type's null sentinel
    :return: numpy array of the column's strings
    """
    # Column header: row and column counts; element count is the product.
    row = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    col = DATA_UNPACKER_SCALAR[DT_INT](socket, buffer, nullMap)
    size = row * col
    # BUGFIX: xrange is Python-2-only; range works on both 2 and 3.
    return np.array([read_string(socket, buffer) for _ in range(size)])
Example #4
0
# Scalar unpackers: each lambda takes (x=socket, y=buffer, nullMap) and reads
# exactly DATA_SIZE[type] bytes from the socket via recvall, unpacking them
# with a struct format in the wire's endianness.
# Numeric types go through swap_fromxxdb, which maps the xxdb null sentinel
# to the value in nullMap; temporal types are wrapped in their project types
# (Date, Month, Time, ...) without null translation.
DATA_UNPACKER_SCALAR[DT_SHORT] = lambda x, y, nullMap: swap_fromxxdb(Struct(endianness('h')).unpack(recvall(x, DATA_SIZE[DT_SHORT], y))[0], DT_SHORT, nullMap)
DATA_UNPACKER_SCALAR[DT_INT] = lambda x, y, nullMap: swap_fromxxdb(Struct(endianness('i')).unpack((recvall(x, DATA_SIZE[DT_INT], y)))[0], DT_INT, nullMap)
DATA_UNPACKER_SCALAR[DT_LONG] = lambda x, y, nullMap: swap_fromxxdb(Struct(endianness('q')).unpack((recvall(x, DATA_SIZE[DT_LONG], y)))[0], DT_LONG, nullMap)
DATA_UNPACKER_SCALAR[DT_DATE] = lambda x, y, nullMap: Date(Struct(endianness('i')).unpack((recvall(x, DATA_SIZE[DT_DATE], y)))[0])
DATA_UNPACKER_SCALAR[DT_MONTH] = lambda x, y, nullMap: Month(Struct(endianness('i')).unpack(recvall(x, DATA_SIZE[DT_MONTH], y))[0])
DATA_UNPACKER_SCALAR[DT_TIME] = lambda x, y, nullMap: Time(Struct(endianness('i')).unpack(recvall(x, DATA_SIZE[DT_TIME], y))[0])
DATA_UNPACKER_SCALAR[DT_MINUTE] = lambda x, y, nullMap: Minute(Struct(endianness('i')).unpack(recvall(x, DATA_SIZE[DT_MINUTE], y))[0])
DATA_UNPACKER_SCALAR[DT_SECOND] = lambda x, y, nullMap: Second(Struct(endianness('i')).unpack(recvall(x, DATA_SIZE[DT_SECOND], y))[0])
DATA_UNPACKER_SCALAR[DT_DATETIME] = lambda x, y, nullMap: Datetime(Struct(endianness('i')).unpack(recvall(x, DATA_SIZE[DT_DATETIME], y))[0])
DATA_UNPACKER_SCALAR[DT_TIMESTAMP] = lambda x, y, nullMap: Timestamp(Struct(endianness('q')).unpack((recvall(x, DATA_SIZE[DT_TIMESTAMP], y)))[0])
DATA_UNPACKER_SCALAR[DT_NANOTIME] = lambda x, y, nullMap: NanoTime(Struct(endianness('q')).unpack(recvall(x, DATA_SIZE[DT_NANOTIME], y))[0])
DATA_UNPACKER_SCALAR[DT_NANOTIMESTAMP] = lambda x, y, nullMap: NanoTimestamp(Struct(endianness('q')).unpack((recvall(x, DATA_SIZE[DT_NANOTIMESTAMP], y)))[0])
# NOTE(review): DT_DATETIME64 reads DATA_SIZE[DT_NANOTIMESTAMP] and wraps in
# NanoTimestamp - presumably an intentional alias (both 8-byte 'q'); confirm.
DATA_UNPACKER_SCALAR[DT_DATETIME64] = lambda x, y, nullMap: NanoTimestamp(Struct(endianness('q')).unpack(recvall(x, DATA_SIZE[DT_NANOTIMESTAMP], y))[0])
DATA_UNPACKER_SCALAR[DT_FLOAT] = lambda x, y, nullMap: swap_fromxxdb(Struct(endianness('f')).unpack(recvall(x, DATA_SIZE[DT_FLOAT], y))[0], DT_FLOAT, nullMap)
DATA_UNPACKER_SCALAR[DT_DOUBLE] = lambda x, y, nullMap: swap_fromxxdb(Struct(endianness('d')).unpack((recvall(x, DATA_SIZE[DT_DOUBLE], y)))[0], DT_DOUBLE, nullMap)
# Strings/symbols are length-delimited and read via read_string instead of a
# fixed-size struct unpack.
DATA_UNPACKER_SCALAR[DT_SYMBOL] = lambda x, y, nullMap: read_string(x, y)
DATA_UNPACKER_SCALAR[DT_STRING] = lambda x, y, nullMap: read_string(x, y)
# Container/opaque types have no scalar representation here: the lambdas
# consume nothing and yield None.
DATA_UNPACKER_SCALAR[DT_ANY] = lambda x, y, nullMap: None
DATA_UNPACKER_SCALAR[DT_DICTIONARY] = lambda x, y, nullMap: None
DATA_UNPACKER_SCALAR[DT_OBJECT] = lambda x, y, nullMap: None

# Vector (bulk) unpackers: each lambda takes (x=socket, y=element count,
# z=buffer, nullMap), reads DATA_SIZE[type]*y bytes in one recvall, unpacks
# them with a repeated struct format, and replaces the xxdb null sentinel
# DBNAN[type] with nullMap[type] via np.where.
# NOTE: the inner `lambda z: np.where(...)` shadows the outer buffer
# parameter z - inside np.where, z is the unpacked np.array, while the
# recvall call in the mapped list still sees the outer buffer z.
DATA_UNPACKER = dict()
# NOTE(review): DT_VOID borrows DT_BOOL's size and NaN sentinel but stores
# its null as nullMap[DT_VOID] - presumably intentional; confirm.
DATA_UNPACKER[DT_VOID] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_BOOL], nullMap[DT_VOID], z), [np.array(Struct(str(y)+'b').unpack(recvall(x, DATA_SIZE[DT_BOOL]*y, z)))]))[0]
# Single-byte types ('b') need no endianness() wrapper.
DATA_UNPACKER[DT_BOOL] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_BOOL], nullMap[DT_BOOL], z), [np.array(Struct(str(y)+'b').unpack(recvall(x, DATA_SIZE[DT_BOOL]*y, z)))]))[0]
DATA_UNPACKER[DT_BYTE] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_BYTE], nullMap[DT_BYTE], z), [np.array(Struct(str(y)+'b').unpack(recvall(x, DATA_SIZE[DT_BYTE]*y, z)))]))[0]
DATA_UNPACKER[DT_SHORT] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_SHORT], nullMap[DT_SHORT], z), [np.array(Struct(endianness(str(y)+'h')).unpack(recvall(x, DATA_SIZE[DT_SHORT]*y, z)))]))[0]
DATA_UNPACKER[DT_INT] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_INT], nullMap[DT_INT], z), [np.array(Struct(endianness(str(y)+'i')).unpack(recvall(x, DATA_SIZE[DT_INT]*y, z)))]))[0]
DATA_UNPACKER[DT_LONG] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_LONG], nullMap[DT_LONG], z), [np.array(Struct(endianness(str(y)+'q')).unpack(recvall(x, DATA_SIZE[DT_LONG]*y, z)))]))[0]
DATA_UNPACKER[DT_FLOAT] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_FLOAT], nullMap[DT_FLOAT], z), [np.array(Struct(endianness(str(y)+'f')).unpack(recvall(x, DATA_SIZE[DT_FLOAT]*y, z)))]))[0]
DATA_UNPACKER[DT_DOUBLE] = lambda x, y, z, nullMap: list(map(lambda z: np.where(z == DBNAN[DT_DOUBLE], nullMap[DT_DOUBLE], z), [np.array(Struct(endianness(str(y)+'d')).unpack((recvall(x, DATA_SIZE[DT_DOUBLE]*y, z))))]))[0]
# DT_DATE returns the raw unpacked tuple with no null translation.
DATA_UNPACKER[DT_DATE] = lambda x, y, z, nullMap: Struct(endianness(str(y)+'i')).unpack((recvall(x, DATA_SIZE[DT_DATE]*y, z)))