示例#1
0
 def pub_topic(self):
     """Publish the current symbols_analyze snapshot to the
     't_symbols_analyze' DataHub topic.

     Only the system account (userid 0) publishes; any other user is a
     no-op. The record is appended with the current UTC time in ms and
     assigned to a pseudo-randomly chosen shard.
     """
     if self.userid != 0:
         return
     topic_name = "t_symbols_analyze"
     topic, shards = g_datahub.get_topic(topic_name)
     sa = self.symbols_analyze
     fields = [
         sa.f_ex_id, sa.f_symbol, sa.f_timeframe, sa.f_bid, sa.f_ask,
         sa.f_spread, sa.f_bar_trend, sa.f_volume_mean, sa.f_volume,
         sa.f_ma_period, sa.f_ma_up, sa.f_ma_low, sa.f_ma_trend,
         sa.f_channel_period, sa.f_channel_up, sa.f_channel_low,
         sa.f_breakout_trend, sa.f_breakout_ts, sa.f_breakout_price,
         sa.f_breakout_volume, sa.f_breakout_volume_rate,
         sa.f_breakout_price_highest, sa.f_breakout_price_highest_ts,
         sa.f_breakout_rate, sa.f_breakout_rate_max,
         arrow.utcnow().timestamp * 1000,
     ]
     record = TupleRecord(schema=topic.record_schema)
     record.values = fields
     # Spread load pseudo-randomly across the topic's shards.
     record.shard_id = shards[randint(1, 1000) % len(shards)].shard_id
     g_datahub.pub_topic(topic_name, [record])
示例#2
0
    def test_build_tuple_record_allow_null(self):
        """A field declared not-null must reject None both at construction
        time and through set_value()."""
        schema = RecordSchema.from_lists([
            'bigint_field', 'string_field', 'double_field', 'bool_field',
            'time_field'
        ], [
            FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
            FieldType.BOOLEAN, FieldType.TIMESTAMP
        ], [False, True, False, True, True])

        # None in the non-nullable double_field must be rejected at build.
        rejected = False
        try:
            TupleRecord(schema=schema,
                        values=[1, 'yc1', None, True, 253402271999000000])
        except InvalidParameterException:
            rejected = True
        if not rejected:
            raise Exception(
                'build record success with none value of field not allowed null'
            )

        # None assigned to the non-nullable bigint_field must be rejected.
        rec = TupleRecord(schema=schema)
        rejected = False
        try:
            rec.set_value(0, None)
        except InvalidParameterException:
            rejected = True
        if not rejected:
            raise Exception(
                'set record success with none value of field not allowd null')
示例#3
0
    def test_put_data_record_with_limit_exceeded(self):
        """put_records against the mocked endpoint must send a well-formed
        'pub' payload and surface LimitExceededException."""
        project_name = 'put'
        topic_name = 'limit_exceeded'
        schema = RecordSchema.from_lists([
            'bigint_field', 'string_field', 'double_field', 'bool_field',
            'time_field'
        ], [
            FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
            FieldType.BOOLEAN, FieldType.TIMESTAMP
        ])

        record = TupleRecord(schema=schema,
                             values=[1, 'yc1', 10.01, True, 1455869335000000])
        record.shard_id = '0'

        def check(request):
            # Verify the SDK serialized the record into the expected payload.
            assert request.method == 'POST'
            assert request.url == 'http://endpoint/projects/put/topics/limit_exceeded/shards'
            body = json.loads(request.body)
            assert body['Action'] == 'pub'
            sent = body['Records']
            assert len(sent) == 1
            fields = sent[0]['Data']
            assert len(fields) == 5
            assert fields == ['1', 'yc1', '1.001e+01', 'true',
                              '1455869335000000']

        raised = False
        try:
            with HTTMock(gen_mock_api(check)):
                dh.put_records(project_name, topic_name, [record])
        except LimitExceededException:
            raised = True
        if not raised:
            raise Exception('put data record success with limit exceeded!')
    def test_get_cursor(self):
        """Exercise get_cursor with every CursorType against a freshly
        created tuple topic; the topic and project are torn down at the
        end regardless of outcome."""
        project_name = "cursor_test_p"
        topic_name = "cursor_test_t%d_1" % int(time.time())

        schema = RecordSchema.from_lists([
            'bigint_field', 'string_field', 'double_field', 'bool_field',
            'event_time1'
        ], [
            FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
            FieldType.BOOLEAN, FieldType.TIMESTAMP
        ])

        try:
            dh.create_project(project_name, '')
        except ResourceExistException:
            pass  # a project left over from an earlier run is fine

        # Guarantee topic/project cleanup even if anything below fails.
        try:
            try:
                dh.create_tuple_topic(project_name, topic_name, 3, 7,
                                      schema, '1')
                dh.wait_shards_ready(project_name, topic_name)
            except ResourceExistException:
                pass

            # Publish three identical tuple records to shard '0'.
            record = TupleRecord(
                schema=schema,
                values=[1, 'yc1', 10.01, True, 1455869335000000])
            record.shard_id = '0'
            record.put_attribute('AK', '47')
            put_record_result = dh.put_records(project_name, topic_name,
                                               [record for _ in range(0, 3)])
            print(put_record_result)

            assert put_record_result.failed_record_count == 0

            # Give the service time to make the records visible.
            time.sleep(5)
            # ======================= get cursor =======================
            cursor_oldest = dh.get_cursor(project_name, topic_name, '0',
                                          CursorType.OLDEST)
            cursor_latest = dh.get_cursor(project_name, topic_name, '0',
                                          CursorType.LATEST)
            cursor_sequence_1 = dh.get_cursor(project_name, topic_name, '0',
                                              CursorType.SEQUENCE, 0)
            cursor_sequence_2 = dh.get_cursor(project_name, topic_name, '0',
                                              CursorType.SEQUENCE, 2)
            cursor_system_time = dh.get_cursor(project_name, topic_name, '0',
                                               CursorType.SYSTEM_TIME, 0)
            print(cursor_system_time)

            # assert cursor_oldest.cursor == cursor_sequence_1.cursor
            # assert cursor_latest.cursor == cursor_sequence_2.cursor
            # assert cursor_oldest.cursor == cursor_system_time.cursor
        finally:
            clean_topic(dh, project_name)
            dh.delete_project(project_name)
示例#5
0
    def test_put_tuple_record_pb_success(self):
        """Putting records via the protobuf client must succeed for records
        addressed by shard_id, hash_key, and partition_key, including the
        BIGINT/TIMESTAMP boundary values."""
        project_name = 'put'
        topic_name = 'success'
        record_schema = RecordSchema.from_lists(
            ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
            [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])

        records = []
        # Addressed by explicit shard id.
        record0 = TupleRecord(schema=record_schema, values=[1, 'yc1', 10.01, True, 253402271999000000])
        record0.shard_id = '0'  # was assigned twice; duplicate line removed
        records.append(record0)

        # Addressed by hash key; minimum BIGINT and TIMESTAMP values.
        record1 = TupleRecord(schema=record_schema,
                              values=[-9223372036854775808, 'yc1', 10.01, True, -62135798400000000])
        record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
        records.append(record1)

        # Addressed by partition key; maximum BIGINT value.
        record2 = TupleRecord(schema=record_schema, values=[9223372036854775807, 'yc1', 10.01, True, 1455869335000000])
        record2.partition_key = 'TestPartitionKey'
        records.append(record2)

        with HTTMock(datahub_pb_api_mock):
            put_result = dh2.put_records(project_name, topic_name, records)

        assert put_result.failed_record_count == 0
        assert put_result.failed_records == []
示例#6
0
def dh_infer_label(object_id, ai_label, score):
    """Publish one AI-inference result (object_id, ai_label, score) to the
    module-level DataHub tuple topic.

    Exits the process with status -1 if the topic is not a TUPLE topic or
    any DatahubException is raised.
    """
    try:
        # Block until every shard of the topic is ready.
        dh.wait_shards_ready(project_name, tuple_topic)

        topic_result = dh.get_topic(project_name, tuple_topic)
        if topic_result.record_type != RecordType.TUPLE:
            print("topic type illegal!")
            sys.exit(-1)

        # Build a single record from the inference result.
        record = TupleRecord(schema=topic_result.record_schema)
        record.set_value('object_id', object_id)
        record.set_value('ai_label', ai_label)
        record.set_value('score', float(score))
        record.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'

        dh.put_records(project_name, tuple_topic, [record])

    except DatahubException as e:
        print(e)
        sys.exit(-1)
示例#7
0
def datahub_import(thread_info):
    try:
        # 连接datahub
        dh = DataHub()
        # block等待所有shard状态ready
        dh.wait_shards_ready(thread_info[2], thread_info[3])
        # 获取topic
        topic = dh.get_topic(thread_info[3], thread_info[2])
        # 获取分区
        shards = dh.list_shards(thread_info[2], thread_info[3])

        # 写入数组记录
        records = []
        # 枚举目录下所有文件
        file_arr = enum_file(thread_info[0], thread_info[1])
        for data_file in file_arr:
            # 读取文本内容
            data_ = open(data_file, 'rb')
            content = data_.readlines()
            data_.close()
            # shard计数器
            i = 0
            # 逐行处理
            for line in content:
                try:
                    # 字符串分割
                    value_arr = line[:-2].split(thread_info[4])
                    values = []
                    for value in value_arr:
                        values.append(value)
                    # 生成record对象
                    record = TupleRecord(schema=topic.record_schema,
                                         values=values)
                    # 设置shard分区
                    record.shard_id = shards[i % len(shards)].shard_id
                    # 写入数组
                    records.append(record)
                    # 计数递增
                    i += 1
                    # 判断是否达到1W条
                    if 10000 <= i:
                        put_records(dh, thread_info[2], thread_info[3],
                                    records, thread_info[5], data_file)
                        records = []
                        i = 0
                except:
                    continue
            # 尝试写入datahub,并记录错误记录
            put_records(dh, thread_info[2], thread_info[3], records,
                        thread_info[5], data_file)
            # TODO 文件处理完毕后续操作
        print '%s上传完毕' % thread_info[0]
    except:
        traceback.print_exc()
示例#8
0
    def test_put_data_record_with_empty_topic_name(self):
        """put_records with an empty topic name must be rejected with
        InvalidParameterException."""
        project_name = 'valid'
        topic_name = ''
        schema = RecordSchema.from_lists(
            ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
            [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])

        record = TupleRecord(schema=schema, values=[1, 'yc1', 10.01, True, 1455869335000000])
        record.shard_id = '0'
        rejected = False
        try:
            dh.put_records(project_name, topic_name, [record])
        except InvalidParameterException:
            rejected = True
        if not rejected:
            raise Exception('put data record success with empty topic name!')
示例#9
0
    def test_put_malformed_tuple_record(self):
        """The mocked endpoint reports the record as malformed; the SDK
        must raise InvalidParameterException."""
        project_name = 'put'
        topic_name = 'malformed'
        schema = RecordSchema.from_lists(
            ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
            [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])

        record = TupleRecord(schema=schema, values=[1, 'yc1', 10.01, True, 1455869335000000])
        record.shard_id = '0'
        rejected = False
        try:
            with HTTMock(datahub_api_mock):
                dh.put_records(project_name, topic_name, [record])
        except InvalidParameterException:
            rejected = True
        if not rejected:
            raise Exception('put malformed tuple record success!')
示例#10
0
    def test_put_data_record_with_unexisted_topic_name(self):
        """Putting into a topic that does not exist must raise
        ResourceNotFoundException."""
        project_name = 'valid'
        topic_name = 'unexisted'
        schema = RecordSchema.from_lists(
            ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
            [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])

        record = TupleRecord(schema=schema, values=[1, 'yc1', 10.01, True, 1455869335000000])
        record.shard_id = '0'
        rejected = False
        try:
            with HTTMock(datahub_api_mock):
                dh.put_records(project_name, topic_name, [record])
        except ResourceNotFoundException:
            rejected = True
        if not rejected:
            raise Exception('put data record success with unexisted topic name!')
示例#11
0
    def fetch_ticker_2_datahub(self, msg):
        """Convert a batch of raw ticker messages to TupleRecords and
        publish them to the 't_ticker' DataHub topic.

        Missing/falsy numeric fields default to 0; a missing timestamp
        defaults to the current UTC time in milliseconds. Publishing
        errors are logged, not raised.
        """
        topic, shards = self.topic['t_ticker']
        records = []
        for t in msg:
            ticker = self.ticker_2_ccxt(t)
            symbol = ticker['symbol']
            # `x and x or y` evaluates identically to `x or y`;
            # use the idiomatic short-circuit form.
            f_ts = ticker['timestamp'] or int(arrow.utcnow().timestamp * 1000)
            f_bid = ticker['bid'] or 0
            f_bid_volume = ticker['bidVolume'] or 0
            f_ask = ticker['ask'] or 0
            f_ask_volume = ticker['askVolume'] or 0
            f_vwap = ticker['vwap'] or 0
            f_open = ticker['open'] or 0
            f_high = ticker['high'] or 0
            f_low = ticker['low'] or 0
            f_close = ticker['close'] or 0
            f_last = ticker['last'] or 0
            f_previous_close = ticker['previousClose'] or 0
            f_change = ticker['change'] or 0
            f_percentage = ticker['percentage'] or 0
            f_average = ticker['average'] or 0
            f_base_volume = ticker['baseVolume'] or 0
            f_quote_volume = ticker['quoteVolume'] or 0
            f_ts_update = arrow.utcnow().timestamp
            v = [
                ws_binance.ex_id, symbol, f_ts, f_bid, f_bid_volume, f_ask,
                f_ask_volume, f_vwap, f_open, f_high, f_low, f_close, f_last,
                f_previous_close, f_change, f_percentage, f_average,
                f_base_volume, f_quote_volume, f_ts_update
            ]
            record = TupleRecord(schema=topic.record_schema)
            record.values = v
            # Pseudo-random shard assignment.
            i = random.randint(1, 100) % len(shards)
            record.shard_id = shards[i].shard_id
            records.append(record)

        logger.debug(self.to_string() +
                     'callback_ticker() len(records)={0}'.format(len(records)))
        try:
            g_datahub.pub_topic('t_ticker', records)
        except Exception as e:
            logger.info(self.to_string() + 'callback_ticker() e={0}'.format(e))
示例#12
0
 async def fetch_ticker(self, ex_id, topic, shards, symbol):
     """Fetch one ticker for *symbol* on exchange *ex_id*, cache its
     bid/ask/ts for spread calculation, and return a one-element list of
     TupleRecords ready for publishing.

     Missing/falsy numeric fields default to 0; a missing timestamp
     defaults to the current UTC time in milliseconds.
     """
     self.init_exchange(ex_id)
     records = []
     ticker = await self.exchanges[ex_id].ex.fetch_ticker(symbol)
     # `x and x or y` evaluates identically to `x or y`;
     # use the idiomatic short-circuit form.
     f_ts = ticker['timestamp'] or int(arrow.utcnow().timestamp * 1000)
     f_bid = ticker['bid'] or 0
     f_bid_volume = ticker['bidVolume'] or 0
     f_ask = ticker['ask'] or 0
     f_ask_volume = ticker['askVolume'] or 0
     f_vwap = ticker['vwap'] or 0
     f_open = ticker['open'] or 0
     f_high = ticker['high'] or 0
     f_low = ticker['low'] or 0
     f_close = ticker['close'] or 0
     f_last = ticker['last'] or 0
     f_previous_close = ticker['previousClose'] or 0
     f_change = ticker['change'] or 0
     f_percentage = ticker['percentage'] or 0
     f_average = ticker['average'] or 0
     f_base_volume = ticker['baseVolume'] or 0
     f_quote_volume = ticker['quoteVolume'] or 0
     f_ts_update = arrow.utcnow().timestamp
     v = [
         ex_id, symbol, f_ts, f_bid, f_bid_volume, f_ask, f_ask_volume,
         f_vwap, f_open, f_high, f_low, f_close, f_last, f_previous_close,
         f_change, f_percentage, f_average, f_base_volume, f_quote_volume,
         f_ts_update
     ]
     record = TupleRecord(schema=topic.record_schema)
     record.values = v
     # Pseudo-random shard assignment.
     i = random.randint(1, 100) % len(shards)
     record.shard_id = shards[i].shard_id
     # Cache the latest quote for cross-exchange spread calculation.
     fetch_base.__symbol_ex_ticker[symbol][ex_id] = {
         "f_ts": f_ts,
         "f_bid": f_bid,
         "f_ask": f_ask,
     }
     records.append(record)
     return records
示例#13
0
    def test_build_record_with_invalid_value(self):
        """TupleRecord construction must reject values that violate the
        schema: wrong types, out-of-range BIGINTs/TIMESTAMPs, and
        non-boolean BOOLEANs.

        The seven copy-pasted try/except blocks are factored into one
        local helper; exception type and failure message are unchanged.
        """
        record_schema = RecordSchema.from_lists(
            ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
            [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])

        def expect_invalid(values):
            # Building a record with these values must raise
            # InvalidParameterException; anything else is a test failure.
            try:
                TupleRecord(schema=record_schema, values=values)
            except InvalidParameterException:
                return
            raise Exception('build record success with invalid value!')

        # Non-numeric string in the BIGINT field.
        expect_invalid(['a', 'yc1', 10.01, True, 1455869335000000])
        # BIGINT below the signed 64-bit minimum.
        expect_invalid([-9223372036854775809, 'yc1', 10.01, True, 1455869335000000])
        # BIGINT above the signed 64-bit maximum.
        expect_invalid([9223372036854775808, 'yc1', 10.01, True, 1455869335000000])
        # Non-numeric string in the DOUBLE field.
        expect_invalid(['1', 'yc1', 'a', True, 1455869335000000])
        # Integer 2 is not a valid BOOLEAN.
        expect_invalid([1, 'yc1', 10.01, 2, 1455869335000000])
        # TIMESTAMP values below the supported range.
        expect_invalid([1, 'yc1', 10.01, True, -62135798400000001])
        expect_invalid([1, 'yc1', 10.01, True, -253402271999000001])
示例#14
0
    # NOTE(review): fragment of a demo script; `topic`, `dh`, `project_name`
    # and `topic_name` are defined above this chunk, and the `while` loop
    # continues past it — the fragment is documented in place, unmodified.
    print("get topic suc! topic=%s" % str(topic))
    # Only TUPLE topics carry a record schema usable below.
    if topic.record_type != RecordType.TUPLE:
        print("topic type illegal!")
        sys.exit(-1)
    print("=======================================\n\n")

    # List and display the topic's shards.
    shards_result = dh.list_shard(project_name, topic_name)
    shards = shards_result.shards
    for shard in shards:
        print(shard)
    print("=======================================\n\n")

    while True:
        records = []

        # Record built from a full values list, with an attribute attached.
        record0 = TupleRecord(schema=topic.record_schema,
                              values=[1, 'yc1', 10.01, True, 1455869335000000])
        record0.shard_id = shards[0].shard_id
        record0.put_attribute('AK', '47')
        records.append(record0)

        # Record built by assigning the values list after construction.
        record1 = TupleRecord(schema=topic.record_schema)
        record1.values = [1, 'yc1', 10.01, True, 1455869335000000]
        record1.shard_id = shards[1].shard_id
        records.append(record1)

        # Record built field-by-field, by index and by field name.
        record2 = TupleRecord(schema=topic.record_schema)
        record2.set_value(0, 3)
        record2.set_value(1, 'yc3')
        record2.set_value('double_field', 10.03)
        record2.set_value('bool_field', False)
        record2.set_value('time_field', 1455869335000013)
示例#15
0
    async def run_fetch_ohlcv(self, ex_id, topic_name, symbols, timeframe_str,
                              since_ms, split_i, max_split_count):
        """Continuously fetch OHLCV candles for this worker's share of
        *symbols* on exchange *ex_id* and publish them to *topic_name*.

        The symbol list is partitioned round-robin across max_split_count
        workers; this worker handles the symbols whose index satisfies
        index % max_split_count == split_i. Runs forever, advancing
        since_ms by one timeframe per outer pass. Transient ccxt errors
        back off 10s and skip to the next symbol.
        """
        self.init_exchange(ex_id)
        if not self.exchanges[ex_id].has_api('fetchOHLCV'):
            logger.warn(self.to_string() +
                        "run_fetch_ohlcv({0}) NOT has interface".format(ex_id))
            return
        if not symbols or len(symbols) <= 0:
            # Default to every symbol we track fees for on this exchange.
            symbols = [k for k in fetch_base.__ex_symbol_fee[ex_id].keys()]

        # Keep only this worker's round-robin share of the symbol list.
        symbols_todo = [sym for s, sym in enumerate(symbols)
                        if s % max_split_count == split_i]
        if len(symbols_todo) <= 0:
            return

        topic, shards = g_datahub.get_topic(topic_name)
        f_timeframe = util.TimeFrame_Minutes[timeframe_str]
        while True:
            ts_start = arrow.utcnow().shift(
                minutes=-f_timeframe).timestamp * 1000
            i = 0
            for symbol in symbols_todo:
                try:
                    data = await self.exchanges[ex_id].fetch_ohlcv(
                        symbol, timeframe_str, since_ms)
                # The two transient-error handlers were byte-identical;
                # catch both exception types in one clause.
                except (ccxt.RequestTimeout, ccxt.DDoSProtection):
                    logger.debug(
                        self.to_string() +
                        "run_fetch_ohlcv({0},{1},{2}) len(symbols_todu)={3} symbol={4} RequestTimeout "
                        .format(ex_id, topic_name, timeframe_str,
                                len(symbols_todo), symbol))
                    await asyncio.sleep(10)
                    continue
                except Exception as e:
                    logger.debug(
                        self.to_string() +
                        "run_fetch_ohlcv({0},{1},{2}) len(symbols_todu)={3} Exception={4}"
                        .format(ex_id, topic_name, timeframe_str,
                                len(symbols_todo), e))
                    await asyncio.sleep(10)
                    continue
                f_ts_update = arrow.utcnow().timestamp
                records = []
                for d in data:
                    # d = [ts, open, high, low, close, volume]
                    f_ts, f_o, f_h, f_l, f_c, f_v = d[0], d[1], d[2], d[3], d[4], d[5]
                    record = TupleRecord(schema=topic.record_schema)
                    record.values = [
                        ex_id, symbol, f_timeframe, f_ts, f_o, f_h, f_l, f_c,
                        f_v, f_ts_update
                    ]
                    # Round-robin shard assignment across all candles.
                    record.shard_id = shards[i % len(shards)].shard_id
                    records.append(record)
                    i = i + 1
                g_datahub.pub_topic(topic_name, records)
                await asyncio.sleep(3)
            since_ms = ts_start
            await asyncio.sleep(3)
示例#16
0
    async def run_calc_spread(self, topic_name="t_spread"):
        """Consume ticker snapshots from the spread task queue and publish
        cross-exchange bid/ask spread records to *topic_name*.

        For each queued ticker (exchange ex1) the spread against every
        other exchange's cached ticker for the same symbol is computed in
        both trade directions. Runs forever; errors are logged and the
        loop continues.
        """
        topic, shards = g_datahub.get_topic(topic_name)
        shard_count = len(shards)
        while True:
            try:
                # Warn when the producers outpace this consumer.
                qsize = fetch_base.__queue_task_spread.qsize()
                if qsize >= 100:
                    logger.warn(self.to_string() +
                                "run_calc_spread() qsize={0}".format(qsize))

                # BUGFIX: removed a leftover "# for test" block that drained
                # the queue and `continue`d unconditionally, which made all
                # of the spread computation below unreachable.

                # task_record layout:
                # [f_ex_id, f_symbol, f_ts, f_bid, f_bid_volume, f_ask,
                #  f_ask_volume, f_vwap, f_open, f_high, f_low, f_close,
                #  f_last, f_previous_close, f_change, f_percentage,
                #  f_average, f_base_volume, f_quote_volume]
                task_record = await fetch_base.__queue_task_spread.get()
                symbol = task_record[1]
                ex1 = task_record[0]
                ex1_name = self.exchanges[ex1].ex.name
                ex1_bid = task_record[3]
                ex1_ask = task_record[5]
                ex1_ts = task_record[2]
                # `x if x else y` is equivalent to `x or y`.
                ex1_fee = fetch_base.__ex_symbol_fee[ex1][symbol] or 0
                record2s = fetch_base.__symbol_ex_ticker[symbol] or {}
                records = []
                for ex2, v in record2s.items():
                    if ex2 == ex1:
                        continue
                    ex2_name = self.exchanges[ex2].ex.name
                    ex2_bid = v["f_bid"]
                    ex2_ask = v["f_ask"]
                    ex2_ts = v["f_ts"]
                    ex2_fee = fetch_base.__ex_symbol_fee[ex2][symbol] or 0
                    # Skip pairings whose tickers are more than 30s apart.
                    if abs(ex1_ts - ex2_ts) > 30000:
                        logger.info(
                            self.to_string() +
                            "run_calc_spread() abs(ex1_ts - ex2_ts)={0}".
                            format(abs(ex1_ts - ex2_ts)))
                        continue
                    spread_ts = ex1_ts if ex1_ts > ex2_ts else ex2_ts
                    f_fee = (ex1_bid * ex1_fee + ex2_ask * ex2_fee)

                    # Direction 1: buy at ex2 ask, sell at ex1 bid.
                    f_spread = ex1_bid - ex2_ask
                    f_profit = (f_spread - f_fee)
                    f_profit_p = (f_profit / ex1_bid) if ex1_bid > 0.0 else 0.0
                    f_ts_update = arrow.utcnow().timestamp
                    record1 = TupleRecord(schema=topic.record_schema)
                    record1.values = [
                        symbol, ex1, ex1_name, ex1_bid, ex1_ts, ex1_fee, ex2,
                        ex2_name, ex2_ask, ex2_ts, ex2_fee, spread_ts,
                        f_spread, f_fee, f_profit, f_profit_p, f_ts_update
                    ]
                    record1.shard_id = shards[random.randint(1, 100) %
                                              shard_count].shard_id
                    records.append(record1)

                    # Direction 2: buy at ex1 ask, sell at ex2 bid.
                    # NOTE(review): f_fee is reused from direction 1 even
                    # though the legs differ — confirm this is intended.
                    f_spread = ex2_bid - ex1_ask
                    f_profit = (f_spread - f_fee)
                    f_profit_p = (f_profit / ex2_bid) if ex2_bid > 0.0 else 0.0
                    record2 = TupleRecord(schema=topic.record_schema)
                    record2.values = [
                        symbol, ex2, ex2_name, ex2_bid, ex2_ts, ex2_fee, ex1,
                        ex1_name, ex1_ask, ex1_ts, ex1_fee, spread_ts,
                        f_spread, f_fee, f_profit, f_profit_p, f_ts_update
                    ]
                    record2.shard_id = shards[random.randint(1, 100) %
                                              shard_count].shard_id
                    records.append(record2)
                g_datahub.pub_topic(topic_name, records)
            except Exception:
                # BUGFIX: the original handlers called
                # traceback.format_exc(e), passing the exception object as
                # format_exc's `limit` parameter; format_exc() takes no
                # exception argument. The three duplicate handlers
                # (DatahubException / Exception / bare) collapsed into one;
                # BaseException (e.g. asyncio cancellation) now propagates.
                logger.error(traceback.format_exc())
示例#17
0
    async def fetch_tickers(self, ex_id, topic, shards):
        """Fetch tickers for every tracked symbol on exchange *ex_id* and
        build TupleRecords for publishing, caching each symbol's
        bid/ask/ts for spread calculation.

        Uses the exchange's bulk fetchTickers endpoint when available,
        otherwise falls back to per-symbol fetch_ticker calls.
        NOTE(review): this definition appears to continue past the end of
        this chunk; only the visible portion is documented here.
        """
        # Temporary: sleep to lower CPU usage.
        await asyncio.sleep(30)

        self.init_exchange(ex_id)
        #logger.debug(self.to_string() + "fetch_tickers({0})".format(ex_id))
        records = []
        if not self.exchanges[ex_id].has_api('fetchTickers'):
            # No bulk endpoint: fetch each tracked symbol individually.
            for symbol in fetch_base.__ex_symbol_fee[ex_id].keys():
                # Temporary: sleep to lower CPU usage.
                await asyncio.sleep(3)

                try:
                    rs = await self.fetch_ticker(ex_id, topic, shards, symbol)
                    records.extend(rs)
                except ccxt.RequestTimeout:
                    # Transient: back off 10s and move to the next symbol.
                    #logger.error(traceback.format_exc())
                    logger.info(
                        self.to_string() +
                        "fetch_tickers() fetch_ticker({0},{1}) RequestTimeout "
                        .format(ex_id, symbol))
                    await asyncio.sleep(10)
                except ccxt.DDoSProtection:
                    # Transient: back off 10s and move to the next symbol.
                    #logger.error(traceback.format_exc())
                    logger.info(
                        self.to_string() +
                        "fetch_tickers() fetch_ticker({0},{1}) DDoSProtection "
                        .format(ex_id, symbol))
                    await asyncio.sleep(10)
                except Exception as e:
                    #logger.error(traceback.format_exc())
                    logger.error(
                        self.to_string() +
                        "fetch_tickers() fetch_ticker({0},{1}) Exception={2}".
                        format(ex_id, symbol, e))
                    await asyncio.sleep(10)
                except:
                    # Catch-all for non-Exception errors; log and continue.
                    logger.error(traceback.format_exc())
                    logger.error(self.to_string() +
                                 "fetch_tickers() fetch_ticker({0},{1})".
                                 format(ex_id, symbol))
                    await asyncio.sleep(10)
            logger.debug(self.to_string() +
                         "fetch_tickers({0}) len(records)={1}".format(
                             ex_id, len(records)))
            return records
        # Bulk path: one call returns all tickers keyed by symbol.
        tickers = await self.exchanges[ex_id].ex.fetch_tickers()
        i = 0
        for symbol, ticker in tickers.items():
            # Falsy fields default to 0; missing timestamp defaults to
            # the current UTC time in milliseconds.
            f_ts = ticker['timestamp'] and ticker['timestamp'] or int(
                arrow.utcnow().timestamp * 1000)
            f_bid = ticker['bid'] and ticker['bid'] or 0
            f_bid_volume = ticker['bidVolume'] and ticker['bidVolume'] or 0
            f_ask = ticker['ask'] and ticker['ask'] or 0
            f_ask_volume = ticker['askVolume'] and ticker['askVolume'] or 0
            f_vwap = ticker['vwap'] and ticker['vwap'] or 0
            f_open = ticker['open'] and ticker['open'] or 0
            f_high = ticker['high'] and ticker['high'] or 0
            f_low = ticker['low'] and ticker['low'] or 0
            f_close = ticker['close'] and ticker['close'] or 0
            f_last = ticker['last'] and ticker['last'] or 0
            f_previous_close = ticker['previousClose'] and ticker[
                'previousClose'] or 0
            f_change = ticker['change'] and ticker['change'] or 0
            f_percentage = ticker['percentage'] and ticker['percentage'] or 0
            f_average = ticker['average'] and ticker['average'] or 0
            f_base_volume = ticker['baseVolume'] and ticker['baseVolume'] or 0
            f_quote_volume = ticker['quoteVolume'] and ticker[
                'quoteVolume'] or 0
            f_ts_update = arrow.utcnow().timestamp
            v = [
                ex_id, symbol, f_ts, f_bid, f_bid_volume, f_ask, f_ask_volume,
                f_vwap, f_open, f_high, f_low, f_close, f_last,
                f_previous_close, f_change, f_percentage, f_average,
                f_base_volume, f_quote_volume, f_ts_update
            ]
            record = TupleRecord(schema=topic.record_schema)
            record.values = v
            # Round-robin shard assignment.
            record.shard_id = shards[i % len(shards)].shard_id
            records.append(record)
            i = i + 1
            # Cache the latest quote for cross-exchange spread calculation.
            fetch_base.__symbol_ex_ticker[symbol][ex_id] = {
                "f_ts": f_ts,
                "f_bid": f_bid,
                "f_ask": f_ask,
            }
            #await fetch_base.__queue_task_spread.put(v)
        logger.debug(
            self.to_string() +
            "fetch_tickers({0}) len(records)={1}".format(ex_id, len(records)))
        return records
示例#18
0
                'bigint_field', 'string_field', 'double_field', 'bool_field',
                'time_field'
            ], [
                FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
                FieldType.BOOLEAN, FieldType.TIMESTAMP
            ])
            try:
                dh.create_topic(topic)
                print "create topic %s success!" % topic_name
                # block等待所有shard状态ready
                dh.wait_shards_ready(project_name, topic_name)
                print "shards all ready!!!"
                shards = dh.list_shards(project_name, topic_name)
                for shard in shards:
                    record = TupleRecord(
                        schema=topic.record_schema,
                        values=[1, 'yc1', 10.01, True, 1455869335000000])
                    record.shard_id = shard.shard_id
                    record.put_attribute('AK', '47')
                    records = []
                    records.append(record)
                    failed_indexs = dh.put_records(project_name, topic_name,
                                                   records)
                    print "put record to project:%s topic:%s failed_index:%s" % (
                        project_name, topic_name, failed_indexs)
            except ObjectAlreadyExistException, e:
                print "topic %s already exist!" % topic_name
            print "=======================================\n\n"
except Exception, e:
    print traceback.format_exc()
    sys.exit(-1)
示例#19
0
    def test_put_tuple_record_success(self):
        """Put three tuple records (built via constructor values, assigned
        values, and set_value) and verify the SDK POSTs to the topic's
        shards endpoint and reports an all-success result.
        """
        project_name = 'put'
        topic_name = 'success'
        record_schema = RecordSchema.from_lists([
            'bigint_field', 'string_field', 'double_field', 'bool_field',
            'time_field'
        ], [
            FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
            FieldType.BOOLEAN, FieldType.TIMESTAMP
        ])

        records = []
        # Routed by explicit shard id; timestamp at its maximum legal value.
        # (The original set shard_id twice; the duplicate assignment is removed.)
        record0 = TupleRecord(
            schema=record_schema,
            values=[1, 'yc1', 10.01, True, 253402271999000000])
        record0.shard_id = '0'
        records.append(record0)

        # Routed by hash key; bigint/timestamp at their minimum legal values.
        record1 = TupleRecord(schema=record_schema)
        record1.values = [
            -9223372036854775808, 'yc1', 10.01, True, -62135798400000000
        ]
        record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
        records.append(record1)

        # Routed by partition key; exercises set_value by index and by name.
        record2 = TupleRecord(
            schema=record_schema,
            values=[9223372036854775807, 'yc1', 10.01, True, 1455869335000000])
        record2.set_value(0, 9223372036854775807)
        record2.set_value('string_field', 'yc1')
        record2.partition_key = 'TestPartitionKey'
        records.append(record2)

        def check(request):
            # The put must be a POST against the topic's shards endpoint.
            assert request.method == 'POST'
            assert request.url == 'http://endpoint/projects/put/topics/success/shards'

        with HTTMock(gen_mock_api(check)):
            put_result = dh.put_records(project_name, topic_name, records)

        assert put_result.failed_record_count == 0
        assert put_result.failed_records == []
示例#20
0
    topic = dh.get_topic(topic_name, project_name)
    print "get topic suc! topic=%s" % str(topic)
    if topic.record_type != RecordType.TUPLE:
        print "topic type illegal!"
        sys.exit(-1)
    print "=======================================\n\n"

    shards = dh.list_shards(project_name, topic_name)
    for shard in shards:
        print shard
    print "=======================================\n\n"

    records = []

    record0 = TupleRecord(schema=topic.record_schema,
                          values=[1, 'yc1', 10.01, True, 1455869335000000])
    record0.shard_id = shards[0].shard_id
    record0.put_attribute('AK', '47')
    records.append(record0)

    record1 = TupleRecord(schema=topic.record_schema)
    record1['bigint_field'] = 2
    record1['string_field'] = 'yc2'
    record1['double_field'] = 10.02
    record1['bool_field'] = False
    record1['event_time1'] = 1455869335000011
    record1.shard_id = shards[1].shard_id
    records.append(record1)
    records.append(record1)

    record2 = TupleRecord(schema=topic.record_schema)
示例#21
0
    def test_set_value_out_of_range(self):
        """Verify value range checking: the extreme legal value of every
        bounded field type is accepted, while one-past-the-limit in either
        direction raises InvalidParameterException.
        """
        project_name = 'put'
        topic_name = 'out_of_range'
        record_schema = RecordSchema.from_lists([
            'tinyint_field', 'smallint_field', 'integer_field', 'bigint_field',
            'string_field', 'float_field', 'double_field', 'bool_field',
            'timestamp_field', 'decimal_field'
        ], [
            FieldType.TINYINT, FieldType.SMALLINT, FieldType.INTEGER,
            FieldType.BIGINT, FieldType.STRING, FieldType.FLOAT,
            FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP,
            FieldType.DECIMAL
        ], [False, True, True, True, True, True, True, True, True, True])

        # Maximum legal value of every bounded field must be accepted.
        record = TupleRecord(schema=record_schema,
                             values=[
                                 127, 32767, 2147483647, 9223372036854775807,
                                 'yc1', 1.1, 10.01, True, 253402271999000000,
                                 decimal.Decimal('12.2')
                             ])
        # Minimum legal value of every bounded field must be accepted.
        record = TupleRecord(schema=record_schema,
                             values=[
                                 -128, -32768, -2147483648,
                                 -9223372036854775808, 'yc1', 1.1, 10.01, True,
                                 -62135798400000000,
                                 decimal.Decimal('12.2')
                             ])

        def expect_out_of_range(values):
            # Building a record from these values must raise
            # InvalidParameterException; succeeding is a test failure.
            try:
                TupleRecord(schema=record_schema, values=values)
            except InvalidParameterException:
                pass
            else:
                raise Exception('set value out of range success!')

        # BUG FIX: the original out-of-range lists held only 9 values for a
        # 10-field schema (float_field was missing), so construction raised
        # for the wrong reason (value-count mismatch) instead of exercising
        # the range check.  Each list below is a full 10-value row with
        # exactly one out-of-range entry.
        dec = decimal.Decimal('12.2')
        out_of_range_rows = [
            # tinyint beyond [-128, 127]
            [128, 0, 0, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            [-129, 0, 0, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            # smallint beyond [-32768, 32767]
            [0, 32768, 0, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            [0, -32769, 0, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            # integer beyond [-2147483648, 2147483647]
            [0, 0, 2147483648, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            [0, 0, -2147483649, 0, 'yc1', 1.1, 10.01, True, 0, dec],
            # bigint beyond the signed 64-bit range
            [0, 0, 0, 9223372036854775808, 'yc1', 1.1, 10.01, True, 0, dec],
            [0, 0, 0, -9223372036854775809, 'yc1', 1.1, 10.01, True, 0, dec],
            # timestamp beyond the range accepted above
            [0, 0, 0, 0, 'yc1', 1.1, 10.01, True, 253402271999000001, dec],
            [0, 0, 0, 0, 'yc1', 1.1, 10.01, True, -62135798400000001, dec],
        ]
        for bad_values in out_of_range_rows:
            expect_out_of_range(bad_values)
def handler(event, context):
    """Function Compute entry point: parse an IoT event and publish one
    tuple record per measurement value to a DataHub topic.

    Expected event JSON: {"timestamp": ..., "values": [{"id","v","q","t"}, ...]}
    (see the sample payload below).  Returns the string 'success'.
    """
    logger = logging.getLogger()

    evt = json.loads(event)

    timestamp = evt['timestamp']
    values = evt['values']

    # NOTE(review): placeholder credentials/endpoint — must be filled in
    # before deployment.
    ACCESS_ID = 'XXXXX'
    ACCESS_KEY = 'XXXXX'
    ENDPOINT = 'http://dh-cn-XXXXX.aliyun-inc.com'
    dh = DataHub(ACCESS_ID, ACCESS_KEY, ENDPOINT)

    PROJECT_NAME = 'veolia_d4b_poc'
    TOPIC_NAME = 'extract_result_table'

    # ===================== put tuple records =====================
    # Block until every shard of the topic is ready to accept writes.
    dh.wait_shards_ready(PROJECT_NAME, TOPIC_NAME)

    topic = dh.get_topic(PROJECT_NAME, TOPIC_NAME)
    record_schema = topic.record_schema

    shards_result = dh.list_shard(PROJECT_NAME, TOPIC_NAME)
    shards = shards_result.shards
    shard_count = len(shards)

    records = []

    for value in values:
        # id sample: SE433_OPC.S01.AISA0101
        # -> company code 'SE433', protocol 'OPC', system 'S01', tag 'AISA0101'
        point_id = value['id']          # renamed: don't shadow builtin 'id'
        id_list = point_id.split('.')
        id_company_code = (id_list[0].split('_'))[0]
        id_protocol_name = (id_list[0].split('_'))[1]
        id_system_code = id_list[1]
        id_tagname = id_list[2]

        v = value['v']
        q = 'true' if value['q'] else 'false'   # quality flag stored as text
        t = value['t']

        rec = TupleRecord(schema=record_schema)
        rec.values = [
            timestamp, id_company_code, id_protocol_name, id_system_code,
            id_tagname, v, q, t
        ]
        # Spread writes across shards at random.
        rec.shard_id = shards[random.randint(0, shard_count - 1)].shard_id
        records.append(rec)

    # Guard: with an empty 'values' list the log line below would reference
    # an undefined 'rec' (NameError) and an empty batch would be put.
    if not records:
        return 'success'

    failed_indexs = dh.put_records(PROJECT_NAME, TOPIC_NAME, records)
    print("[print9] put tuple %d records, shard_id = %s, failed list: %s" %
          (len(records), rec.shard_id, failed_indexs))
    # If failed_indexs is non-empty the failed records should be retried.

    return 'success'


# Sample event:
# {
# 	"timestamp":1521698375065,
# 	"values":[
# 		{
# 			"id":"SE433_OPC.S01.IW1440",
# 			"v":206,
# 			"q":true,
# 			"t":1521698358299
# 		},
# 		{
# 			"id":"SESE433_OPC433.S01.LCV1414_ACT",
# 			"v":42,
# 			"q":true,
# 			"t":1521698358222
# 		},
# 		{
# 			"id":"SE433_OPC.S01.LT1430A",
# 			"v":22,
# 			"q":true,
# 			"t":1521698358235
# 		},
# 		…
# 	]
# }
示例#23
0
def handler(event, context):
    """Entry point: for each incoming ticker record, look up recent tickers
    of the same symbol on other exchanges, compute both directions of the
    bid/ask spread, replace them into MySQL and publish them to DataHub.

    NOTE(review): depends on module-level names defined elsewhere in the
    file (logger, conn_read, conn_write, topic, shards, datahub,
    project_name, topic_name, dh, tuple_topic_name, RecordType, sys, ...).
    The section after the record loop references dh/tuple_topic_name and
    looks like unrelated example code spliced in — verify before relying
    on it.
    """
    logger.debug(event)
    payload = event.decode()  # renamed from 'str' to avoid shadowing the builtin
    if payload == "":
        logger.debug("str == \"\"")
        return
    evt = json.loads(payload)
    if not evt:
        logger.debug("evt is None")
        return
    records = evt.get("records")
    if not records:
        logger.debug("records is None")
        return
    logger.debug("len(records)={0}".format(len(records)))
    logger.debug(records)
    for record in records:
        # Incoming row layout (by index): 0=ex_id, 1=symbol, 2=ts, 3=bid, 5=ask.
        data = record["data"]
        ex1 = data[0]
        symbol = data[1]
        ex1_ts = data[2]
        ex1_bid = data[3]
        ex1_ask = data[5]
        # Tickers for the same symbol on other exchanges from the last 30s.
        sql_select = "SELECT f_ex_id,f_bid,f_ask,f_ts FROM t_ticker_crrent where f_symbol=\"{0}\" and f_ex_id!=\"{1}\" and f_ts > (UNIX_TIMESTAMP() - 30) * 1000;".format(
            symbol, ex1)
        logger.debug("sql_select={0}".format(sql_select))
        cursor_reaed = conn_read.cursor()
        cursor_reaed.execute(sql_select)
        rows = cursor_reaed.fetchall()
        logger.debug("rows={0}".format(len(rows)))
        if len(rows) <= 0:
            logger.debug("len(rows) <= 0")
            continue
        # Renamed from 'records' so the outer loop's source list isn't shadowed.
        spread_records = []
        sql_insert = "replace into t_spread_current(f_symbol,f_ex1,f_ex2,f_spread,f_ts) values"
        c = 0
        cursor_write = conn_write.cursor()
        for row in rows:
            ex2 = row[0]
            ex2_bid = row[1]
            ex2_ask = row[2]
            ex2_ts = row[3]
            # Spread timestamp is the fresher of the two ticker timestamps.
            spread_ts = ex1_ts if ex1_ts > ex2_ts else ex2_ts
            sql_value1 = "('{0}','{1}','{2}',{3},{4})".format(
                symbol, ex1, ex2, ex1_bid - ex2_ask, spread_ts)
            sql_value2 = ",('{0}','{1}','{2}',{3},{4})".format(
                symbol, ex2, ex1, ex2_bid - ex1_ask, spread_ts)
            if c == 0:
                sql_insert = sql_insert + sql_value1 + sql_value2
            else:
                sql_insert = sql_insert + "," + sql_value1 + sql_value2
            c = c + 1

            # Spread in the ex1 -> ex2 direction.
            record1 = TupleRecord(schema=topic.record_schema)
            record1.values = [symbol, ex1, ex2, ex1_bid - ex2_ask, spread_ts]
            record1.shard_id = shards[c % len(shards)].shard_id
            spread_records.append(record1)
            c = c + 1

            # Spread in the ex2 -> ex1 direction.
            record2 = TupleRecord(schema=topic.record_schema)
            record2.values = [symbol, ex2, ex1, ex2_bid - ex1_ask, spread_ts]
            record2.shard_id = shards[c % len(shards)].shard_id
            spread_records.append(record2)

        sql_insert = sql_insert + ";"
        logger.debug("sql_insert={0}".format(sql_insert))
        count_write = cursor_write.execute(sql_insert)
        logger.debug(count_write)
        # BUG FIX: cursor.execute() returns the affected-row count (an int);
        # per DB-API 2.0 the commit belongs on the connection, so the
        # original 'count_write.commit()' raised AttributeError.
        conn_write.commit()

        logger.debug("datahub.put_records={0}".format(len(spread_records)))
        datahub.put_records(project_name, topic_name, spread_records)
    # Block until all shards are in the ready state.
    dh.wait_shards_ready(project_name, tuple_topic_name)
    print("shards all ready!!!")
    print("=======================================\n\n")

    topic_result = dh.get_topic(project_name, "tuple_topic")
    print(topic_result)
    if topic_result.record_type != RecordType.TUPLE:
        print("topic type illegal!")
        sys.exit(-1)
    print("=======================================\n\n")

    record_schema = topic_result.record_schema

    records0 = []
    record0 = TupleRecord(schema=record_schema,
                          values=[1, 'yc1', 10.01, True, 1455869335000000])
    record0.shard_id = '0'
    record0.put_attribute('AK', '47')
    records0.append(record0)

    record1 = TupleRecord(schema=record_schema)
    record1.set_value('bigint_field', 2)
    record1.set_value('string_field', 'yc2')
    record1.set_value('double_field', None)
    record1.set_value('bool_field', False)
    record1.set_value('time_field', 1455869335000011)
    record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
    records0.append(record1)

    record2 = TupleRecord(schema=record_schema)
    record2.set_value(0, 3)
示例#25
0
    def test_put_tuple_record_pb_success(self):
        """Put three tuple records through the protobuf client (dh2) and
        verify the serialized frame carries each field value as its
        string-encoded bytes, then that the put reports all-success.
        """
        project_name = 'put'
        topic_name = 'success'
        record_schema = RecordSchema.from_lists([
            'bigint_field', 'string_field', 'double_field', 'bool_field',
            'time_field'
        ], [
            FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE,
            FieldType.BOOLEAN, FieldType.TIMESTAMP
        ])

        records = []
        # Routed by explicit shard id; timestamp at its maximum legal value.
        # (The original set shard_id twice; the duplicate assignment is removed.)
        record0 = TupleRecord(
            schema=record_schema,
            values=[1, 'yc1', 10.01, True, 253402271999000000])
        record0.shard_id = '0'
        records.append(record0)

        # Routed by hash key; bigint/timestamp at their minimum legal values.
        record1 = TupleRecord(schema=record_schema,
                              values=[
                                  -9223372036854775808, 'yc1', 10.01, True,
                                  -62135798400000000
                              ])
        record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
        records.append(record1)

        # Routed by partition key; bigint at its maximum legal value.
        record2 = TupleRecord(
            schema=record_schema,
            values=[9223372036854775807, 'yc1', 10.01, True, 1455869335000000])
        record2.partition_key = 'TestPartitionKey'
        records.append(record2)

        # Expected wire encoding, one row per record: every value is
        # serialized as its string representation in bytes.
        expected = [
            [b'1', b'yc1', b'10.01', b'true', b'253402271999000000'],
            [b'-9223372036854775808', b'yc1', b'10.01', b'true',
             b'-62135798400000000'],
            [b'9223372036854775807', b'yc1', b'10.01', b'true',
             b'1455869335000000'],
        ]

        def check(request):
            assert request.method == 'POST'
            assert request.url == 'http://endpoint/projects/put/topics/success/shards'
            crc, compute_crc, pb_str = unwrap_pb_frame(request.body)
            pb_put_record_request = PutRecordsRequest()
            pb_put_record_request.ParseFromString(pb_str)
            assert len(pb_put_record_request.records) == len(expected)
            for pb_record, expected_values in zip(pb_put_record_request.records,
                                                  expected):
                field_data = pb_record.data.data
                assert len(field_data) == len(expected_values)
                for field, expected_value in zip(field_data, expected_values):
                    assert field.value == expected_value

        with HTTMock(gen_pb_mock_api(check)):
            put_result = dh2.put_records(project_name, topic_name, records)

        assert put_result.failed_record_count == 0
        assert put_result.failed_records == []
示例#26
0
    try:
        dh.create_topic(topic)
        print "create topic %s success!" % args.project
        print "=======================================\n\n"
    except ObjectAlreadyExistException, e:
        print "topic %s already exist!" % args.project
        print "=======================================\n\n"

    write_request_count = 0
    write_suc_reord_count = 0
    write_fail_record_count = 0
    with Timer() as t:
        for i in range(0, args.round):
            records = []
            for j in range(0, args.batch):
                record = TupleRecord(schema=record_schema)
                #                record['bigint_field'] = 2
                record['string_field'] = data
                #                record['double_field'] = 10.02
                #                record['bool_field'] = False
                #                record['time_field'] = 1455869335000011
                record.shard_id = '0'
                records.append(record)
            fail_records = dh.put_records(args.project, args.topic, records)
            write_request_count += 1
            write_suc_reord_count += (len(records) - len(fail_records))
            write_fail_record_count += len(fail_records)
            #print "%d put %d records, fail %d" %(write_request_count, len(records), len(fail_records))

    print "===============result=================="
    print "write_request_count:%d, %f/s" % (write_request_count,
示例#27
0
    try:
        dh.create_tuple_topic(args.project, args.topic, 1, 7, record_schema,
                              'perf test')
    except ResourceExistException as e:
        print("topic %s already exist!" % args.topic)
        print("=======================================\n\n")

    write_request_count = 0
    write_suc_reord_count = 0
    write_fail_record_count = 0

    with Timer() as t:
        for i in range(0, args.round):
            records = []
            for j in range(0, args.batch):
                record = TupleRecord(schema=record_schema)
                #                record['bigint_field'] = 2
                record.set_value('string_field', data)
                #                record['double_field'] = 10.02
                #                record['bool_field'] = False
                #                record['time_field'] = 1455869335000011
                record.shard_id = '0'
                records.append(record)
            put_result = dh.put_records(args.project, args.topic, records)

            write_request_count += 1
            write_suc_reord_count += (len(records) -
                                      put_result.failed_record_count)
            write_fail_record_count += put_result.failed_record_count
            # print "%d put %d records, fail %d" %(write_request_count, len(records), len(fail_records))