Example 1
# Assumed imports for this excerpt; PATH is the tests directory that
# Example 2 spells out inline.
import os
import pandas as pd
from yandextank.plugins.Phantom.reader import string_to_df_microsec

def test_reader_us(self):
    with open(os.path.join(PATH, 'phout.dat')) as f:
        chunk = f.read()
    result = string_to_df_microsec(chunk)
    expected = pd.read_pickle(os.path.join(PATH, 'expected_df.dat'))
    # Normalize timestamps so both frames start at t=0 before comparing.
    result['ts'] -= result['ts'][0]
    assert result.equals(expected)
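Judging by the function's name and the ts column, string_to_df_microsec parses phantom's raw phout log into a pandas DataFrame keyed by microsecond timestamps; the pickled expected_df.dat holds the reference frame the parser's output must match.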
Example 2
# Same test as Example 1, with the data paths written out relative to the
# repository root instead of going through the PATH constant.
import pandas as pd
from yandextank.plugins.Phantom.reader import string_to_df_microsec

def test_reader_us(self):
    with open('yandextank/plugins/Phantom/tests/phout.dat') as f:
        chunk = f.read()
    result = string_to_df_microsec(chunk)
    expected = pd.read_pickle(
        'yandextank/plugins/Phantom/tests/expected_df.dat')
    result['ts'] -= result['ts'][0]
    assert result.equals(expected)
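A hedged sketch for regenerating the reference pickle when the parser's output legitimately changes (paths follow Example 2; only do this after manually checking the new frame):

    import pandas as pd
    from yandextank.plugins.Phantom.reader import string_to_df_microsec

    with open('yandextank/plugins/Phantom/tests/phout.dat') as f:
        df = string_to_df_microsec(f.read())
    # Apply the same normalization the test applies before it compares.
    df['ts'] -= df['ts'][0]
    df.to_pickle('yandextank/plugins/Phantom/tests/expected_df.dat')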
Example 3
# Assumed imports for this excerpt. DataSession comes from the netort
# data-manager library; get_uploader and get_handler are helpers defined
# alongside main() in the original module.
import argparse
import signal
from datetime import datetime

from netort.data_manager import DataSession

from yandextank.plugins.Phantom.reader import string_to_df_microsec


def main():
    parser = argparse.ArgumentParser(description='Process phantom output.')
    parser.add_argument('phout', type=str, help='path to phantom output file')
    parser.add_argument(
        '--url',
        type=str,
        default='https://volta-back-testing.common-int.yandex-team.ru/')
    parser.add_argument('--name',
                        type=str,
                        help='test name',
                        default=str(datetime.utcnow()))
    parser.add_argument('--db_name',
                        type=str,
                        help='ClickHouse database name',
                        default='luna_test')
    args = parser.parse_args()

    # One data session with two sinks: the luna backend at --url and a
    # local copy via the local_storage client.
    clients = [
        {'type': 'luna', 'api_address': args.url, 'db_name': args.db_name},
        {'type': 'local_storage'},
    ]
    data_session = DataSession({'clients': clients})
    data_session.update_job({'name': args.name})
    print('Test name: %s' % args.name)

    col_map_aggr = {
        name: 'metric %s' % name
        for name in [
            'interval_real', 'connect_time', 'send_time', 'latency',
            'receive_time', 'interval_event'
        ]
    }
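    # col_map_aggr gives each aggregated metric a human-readable name;
    # get_uploader (defined alongside main()) turns the session and column
    # map into a callable that ships DataFrames to it.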
    uploader = get_uploader(data_session, col_map_aggr, True)

    # Route SIGINT through get_handler(data_session) so Ctrl+C is handled
    # with the session in scope.
    signal.signal(signal.SIGINT, get_handler(data_session))

    # Stream the phout file in 128 KiB chunks, always cutting at the last
    # newline so no row is split across chunks.
    with open(args.phout) as f:
        buffer = ''
        while True:
            parts = f.read(128 * 1024)
            try:
                chunk, new_buffer = parts.rsplit('\n', 1)
                chunk = buffer + chunk + '\n'
                buffer = new_buffer
            except ValueError:
                # No newline in this read (e.g. at EOF): flush the buffer.
                chunk = buffer + parts
                buffer = ''
            if len(chunk) > 0:
                df = string_to_df_microsec(chunk)
                uploader(df)
            else:
                break
    data_session.close()
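A hypothetical invocation, assuming the script above is saved as upload_phout.py (the file name is not given in the source):

    python upload_phout.py phout.dat --name 'reader smoke test' --db_name luna_test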