# --- Example 1 (scraped-sample delimiter; original text: "Esempio n. 1" / "0") ---
import logging
import os
import pickle
import pprint
from enum import Enum, IntEnum
from pathlib import Path

import numpy as np
import zmq

from data_class import opunit_data
from info import data_info
from mini_trainer import MiniTrainer
from type import OpUnit
from util import logging_util

# Module-level side effect: configure logging at 'info' level as soon as this
# module is imported (before any enum/command handling below runs).
logging_util.init_logging('info')


class Callback(IntEnum):
    """
    ModelServerManager <==> ModelServer callback Id.
    Needs to be kept consistent with ModelServerManager.h's Callback Enum
    """
    # NOTE: member values are a wire protocol shared with the C++ side — never
    # renumber or reorder them without updating ModelServerManager.h as well.
    NOOP = 0       # no-op callback id
    CONNECTED = 1  # presumably signals the server connected — confirm against ModelServerManager.h


class Command(Enum):
    """
    Command enum for actions to take from the manager.
    This has to be kept consistent with the C++ ModelServerManager.
# --- Example 2 (scraped-sample delimiter; original text: "Esempio n. 2" / "0") ---
                         help='OLTPBench warmup period')
    aparser.add_argument('--tpcc_hack',
                         default=False,
                         help='Should do feature correction for TPCC')
    # NOTE(review): '--tpcc_hack' defines no type= or action=, so any value given
    # on the command line arrives as a non-empty string ("False" is truthy).
    # Confirm it is only used with its default or as a store_true-style flag.
    aparser.add_argument('--ee_sample_interval',
                         type=int,
                         default=9,
                         help='Sampling interval for the execution engine OUs')
    aparser.add_argument('--txn_sample_interval',
                         type=int,
                         default=0,
                         help='Sampling interval for the transaction OUs')
    aparser.add_argument('--log', default='info', help='The logging level')
    args = aparser.parse_args()

    # Configure log verbosity before any training output is produced.
    logging_util.init_logging(args.log)

    logging.info("Global trainer starts.")

    # Load the pre-trained mini (OU) models produced by an earlier mini-trainer
    # run.  NOTE(review): pickle.load executes arbitrary code on load — only
    # open model files from a trusted source.
    with open(args.mini_model_file, 'rb') as pickle_file:
        model_map = pickle.load(pickle_file)
    # Train the global models (resource / impact / direct) from the input data,
    # using the mini-model map for per-OU predictions.
    trainer = GlobalTrainer(args.input_path, args.model_results_path,
                            args.ml_models, args.test_ratio,
                            args.impact_model_ratio, model_map,
                            args.warmup_period, args.tpcc_hack,
                            args.ee_sample_interval, args.txn_sample_interval)
    resource_model, impact_model, direct_model = trainer.train()
    # Persist the trained models for later reuse.
    # NOTE(review): direct_model is trained but never saved here — confirm that
    # is intentional.
    with open(args.save_path + '/global_resource_model.pickle', 'wb') as file:
        pickle.dump(resource_model, file)
    with open(args.save_path + '/global_impact_model.pickle', 'wb') as file:
        pickle.dump(impact_model, file)
from Constants import mem_per_1vCPU
from TranslationService import translate, genitive_case
from util.constants_util import get_logs_root_path, get_fn_tag
from util.file_util import read_file, read_csv_to_dataframe, write_to_csv
from util.subprocess_util import run_executable
from util.logging_util import log, init_logging

# Load key/value pairs from the local .env file (python-dotenv; imported in the
# part of this file above the visible imports).
env_config = dotenv_values()

# Name of the AWS ECR repository holding the benchmark images; raises KeyError
# at import time if the .env file does not define it.
REPOSITORY_NAME = env_config["AWS_ECR_REPOSITORY_NAME"]

# Shell script used to query image metadata from AWS ECR.
describe_image_exec_path = './script/aws/environment/describe-image.sh'

# Column order for the CSV rows written when parsing the test logs.
results_header = ["image_size_mb", "memory_size_mb", "init_duration_ms", "artificial_init_duration", "duration", "available_threads"]

# Module-level side effect: set up logging before main() runs.
init_logging()


# CLI: `-t/--test` selects which test run's logs to parse.
parser = argparse.ArgumentParser(description="Results Parser")
parser.add_argument("-t", "--test", type=str)


def main():
    """Entry point: read the `-t/--test` CLI argument and parse that test's logs."""
    cli_args = parser.parse_args()
    parse_logs(cli_args.test)


def parse_logs(test_num):
    log(f'Parsing logs for test: {test_num}')