def test_create_database(self, db_config):
        # type: (dict) -> None
        """Create and populate the 'testIthemal' database, then sanity-check it.

        Runs the create_and_populate_db.sh script with the bundled test
        config/schema/data, connects to the freshly created database, and
        asserts the expected number of rows landed in the `code` table.
        """
        create_script = os.environ[
            'ITHEMAL_HOME'] + '/data_export/scripts/create_and_populate_db.sh'
        schema = os.environ[
            'ITHEMAL_HOME'] + '/data_export/schemas/mysql_schema.sql'

        # subprocess.call blocks until the shell script finishes, so the
        # database is fully populated before we connect below.
        subprocess.call([
            'bash', create_script, 'test_data/db_config.cfg', 'testIthemal',
            schema, 'test_data'
        ])

        cnx = ut.create_connection(user=db_config['user'],
                                   password=db_config['password'],
                                   port=db_config['port'],
                                   database='testIthemal')
        assert cnx is not None

        sql = 'select count(*) from code'
        rows = ut.execute_query(cnx, sql, True)

        # COUNT(*) yields exactly one row with one column.
        assert len(rows) == 1
        assert len(rows[0]) == 1
        # The bundled test dataset is expected to contain 3287 code blocks.
        assert rows[0][0] == 3287
# Beispiel #2
# 0
    def test_connectivity(self, db_config):
        # type: (dict) -> None
        """Verify the config carries credentials and a connection can open."""
        # Membership tests on the dict directly; `.keys()` is redundant.
        assert 'password' in db_config
        assert 'user' in db_config
        assert 'port' in db_config

        cnx = ut.create_connection(user=db_config['user'],
                                   password=db_config['password'],
                                   port=db_config['port'],
                                   database=None)
        # PEP 8: identity comparison against None, not equality.
        assert cnx is not None
# Beispiel #3
# 0
def save_data(savefile, arch, format, database=None, config=None):
    # type: (str, int, str, Optional[str], Optional[str]) -> None
    """Extract instruction and timing data from the database and save it.

    Args:
        savefile: path the raw data is serialized to via torch.save.
        arch: architecture id used to select timing data.
        format: code representation format passed to extract_data.
        database: database name; default connection settings when None.
        config: optional path to a connection config file.
    """
    if config is None:
        cnx = ut.create_connection(database=database)
    else:
        cnx = ut.create_connection_from_config(database=database, config_file=config)

    # Close the connection even if extraction fails (the original leaked it).
    try:
        data = dt.DataInstructionEmbedding()
        data.extract_data(cnx, format, ['code_id', 'code_intel'])
        data.get_timing_data(cnx, arch)
    finally:
        cnx.close()

    torch.save(data.raw_data, savefile)
# Beispiel #4
# 0
def predict_compare(args, model, data, verbose):
    """Predict timing for each block in args.ctable and compare with actuals.

    Writes one "predicted, actual, error" line per code block to results.csv.
    Rows whose prediction or timing lookup fails are skipped with a
    diagnostic print (best-effort, matching the surrounding scripts' style).
    """
    cnx = ut.create_connection(database=args.database,
                               user=args.user,
                               password=args.password,
                               port=args.port)
    sql = 'SELECT code_id, code_xml from ' + args.ctable
    rows = ut.execute_query(cnx, sql, True)

    try:
        f = open('results.csv', 'w')
    except Exception as e:
        print(e)
        print('cannot create result file!\n')
        # Bail out early: the original fell through with `f` undefined and
        # crashed with a NameError at f.close().
        cnx.close()
        return

    with f:
        # Header typo fixed ("predicated" -> "predicted").
        f.write('predicted, actual, accuracy\n')

        for row in rows:
            # Skip rows with a missing id or missing code XML.
            if row[0] is None or row[1] is None:
                continue

            try:
                data.raw_data = [(-1, -1, _fake_intel, row[1])]
                data.data = []
                data.prepare_data(fixed=True, progress=False)
                time_predict = model(data.data[-1]).item()

                sql = ('SELECT time_actual from ' + args.ttable +
                       ' WHERE code_id = ' + str(row[0]))
                time_actual = ut.execute_query(cnx, sql, True)
                if len(time_actual) > 1:
                    raise ValueError('code_id is not unique in time table')
                # -1 marks an invalid measurement; report it as 100% error.
                if time_actual[0][0] == -1:
                    err = 1
                else:
                    err = abs(time_predict - time_actual[0][0]) / time_actual[0][0]
                f.write(str(time_predict) + ', ' + str(time_actual[0][0]) +
                        ', ' + str(err) + '\n')
            except Exception as e:
                # Best-effort loop: log the failing block and continue.
                print(e)
                print('exception occurred')
                print('code_id=' + str(row[0]))

    cnx.close()
# Beispiel #5
# 0
def load_dataset(data_savefile=None, arch=None, format='text'):
    """Load a dataset either from a saved file or directly from the database.

    Args:
        data_savefile: path to a torch-saved raw-data file; when None the
            data is extracted from the database instead.
        arch: architecture id for timing data; required when data_savefile
            is None.
        format: code representation format passed to extract_data.

    Returns:
        A fully prepared DataInstructionEmbedding instance.

    Raises:
        ValueError: if neither data_savefile nor arch is provided.
    """
    data = DataInstructionEmbedding()

    if data_savefile is None:
        if arch is None:
            raise ValueError('Must provide one of data_savefile or arch')

        # Close the connection even if extraction fails (the original
        # leaked it).
        cnx = ut.create_connection()
        try:
            data.extract_data(cnx, format, ['code_id', 'code_intel'])
            data.get_timing_data(cnx, arch)
        finally:
            cnx.close()
    else:
        data.raw_data = torch.load(data_savefile)

    data.read_meta_data()
    data.prepare_data()
    data.generate_datasets()

    return data
# Beispiel #6
# 0
def graph_model_validate(base_params, model_file, iaca_only):
    # type: (BaseParameters, str, bool) -> None
    """Validate a saved graph model on the test set, writing realtime results.

    Args:
        base_params: parameters used to load the data and construct the model.
        model_file: checkpoint file the trainer loads weights from.
        iaca_only: when True, restrict the test set to blocks that have
            IACA-measured timings in the `times` table.
    """
    data = load_data(base_params)
    if iaca_only:
        cnx = ut.create_connection()
        try:
            legal_code_ids = set(
                pd.read_sql('SELECT time_id, code_id FROM times WHERE kind="iaca"', cnx)
                .set_index('time_id')
                .code_id
            )
        finally:
            # The original leaked this connection.
            cnx.close()
        data.test = [datum for datum in data.test if datum.code_id in legal_code_ids]
    model = load_model(base_params, data)

    train = tr.Train(
        model, data, tr.PredictionType.REGRESSION, ls.mse_loss, 1,
        batch_size=1000, clip=None, predict_log=base_params.predict_log,
    )

    resultfile = os.environ['ITHEMAL_HOME'] + '/learning/pytorch/results/realtime_results.txt'
    (actual, predicted) = train.validate(resultfile=resultfile, loadfile=model_file)
# Beispiel #7
# 0
    # NOTE(review): fragment of a script entry point — the enclosing function
    # header and the rest of the `names = [` literal lie outside this view.
    parser = argparse.ArgumentParser()
    parser.add_argument('--arch', action='store', type=int, required=True)

    # Database connection and table-selection parameters.
    parser.add_argument('--database', action='store', type=str, required=True)
    parser.add_argument('--user', action='store', type=str, required=True)
    parser.add_argument('--password', action='store', type=str, required=True)
    parser.add_argument('--port', action='store', type=int, required=True)
    parser.add_argument('--ctable', action='store', type=str, required=True)
    parser.add_argument('--ttable', action='store', type=str, required=True)
    parser.add_argument('--limit', action='store', type=int, default=None)
    # NOTE(review): argparse `type=bool` is truthy for any non-empty string,
    # so "--tp False" still yields True — verify intent.
    parser.add_argument('--tp', action='store', type=bool, default=False)

    args = parser.parse_args(sys.argv[1:])

    cnx = ut.create_connection(database=args.database,
                               user=args.user,
                               password=args.password,
                               port=args.port)

    try:
        f = open('codeTiming.csv', 'r')
    except Exception as e:
        print(e)
        print('cannot open block timing file!\n')
        assert False

    # Each line of codeTiming.csv is comma-separated; the first field is
    # the code id.
    for ln in f:

        splitted = ln.split(',')
        codeId = splitted[0]

        names = [
# Beispiel #8
# 0
    # NOTE(review): fragment of a script entry point — the enclosing function
    # header and the benchmark-suite objects (gimp, firefox, linux, tsvc, ...)
    # are defined outside this view.
    # Read program lists for the gimp, firefox, and linux suites from text
    # files under $ITHEMAL_HOME; each line holds whitespace-separated names.
    with open(os.path.join(os.environ['ITHEMAL_HOME'], 'gimp.txt'), 'r') as f:
        lines = f.readlines()
        for line in lines:
            gimp.add_programs(line.split())
    with open(os.path.join(os.environ['ITHEMAL_HOME'], 'firefox.txt'), 'r') as f:
        lines = f.readlines()
        for line in lines:
            firefox.add_programs(line.split())
    with open(os.path.join(os.environ['ITHEMAL_HOME'], 'linux.txt'), 'r') as f:
        lines = f.readlines()
        for line in lines:
            linux.add_programs(line.split())

            
    cnx = ut.create_connection(args.database)

    # Hard-coded program lists for the remaining benchmark suites.
    tsvc.add_programs(['runvec'])
    tsvc.add_programs(['runnovec'])
    polybench.add_programs(['2mm','3mm','atax','bicg','cholesky','doitgen','gemm','gemver','gesummv','mvt','symm','syrk','syr2k','trisolv', 'trmm', 'durbin','dynprog','gramschmidt','lu','ludcmp','correlation','covariance','floyd-warshall','reg_detect','adi','fdtd-2d','fdtd-apml','jacobi-1d-imper','jacobi-2d-imper','seidel-2d'])
    cortexsuite.add_programs(['sphinx','a.out','liblinear-tsmall'])
    spec2006.add_programs(['specxz'])
    clang.add_programs(['clang'])
    java.add_programs(['javaldx','libjvmfwklo.so'])
    openoffice.add_programs(['oosplash','soffice.bin','libuno_cppu.so.3','libuno_sal.so.3','libuno_salhelpergcc3.so.3','libvclplug_genlo.so','libmergedlo.so'])
    linux.add_programs(['echo','gedit','libm.so.6','libc.so.6','libgcc_s.so.1','ld-linux-x86-64.so.2','libpthread.so.0','linux-vdso.so.1','libdl.so.2','libglib-2.0.so.0','libselinux.so.1','libdbus-1.so.3','libclucene-shared.so.1','libz.so.1','libcom_err.so.2','libexpat.so.1','libudev.so.1','libtinfo.so.5','libclucene-core.so.1','libpng12.so.0','libcrypto.so.1.0.0','_glib.so','_gobject.so','_hashlib.x86_64-linux-gnu.so','libgpg-error.so.0'])
    games.add_programs(['sgt-mines'])
    gimp.add_programs([])
    rhythm.add_programs(['rhythmbox','libmpris.so','libaudioscrobbler.so','libmmkeys.so'])
    
    # gimp.add_programs(['sparkle','noise_spread','contrast-stretch','engrave','threshold-alpha','gradient-flare','crop-auto','shift','jigsaw','waves','curve-bend','tile-glass','blur-motion','noise-hsv','lens-flare','semi-flatten','blur-gauss-selective','animation-optimize','colors-exchange','lens-apply','nova','cartoon','border-average','sharpen','channel-mixer','pixelize','red-eye-removal','color-to-alpha','edge-laplace','lens-distortion','nl-filter','warp','blinds','edge-sobel','file-png','max-rgb','colorify','convolution-matrix','emboss','colormap-remap','noise-solid','noise-rgb','polar-coords','iwarp','tile','grid','despeckle','rotate','sample-colorize','tile-seamless','blur-gauss','noise-randomize','blur','noise-spread','map-object','contrast-normalize','noise-slur.so','gradient-map','smooth-palette','contrast-retinex','whirl-pinch','color-enhance','illusion','edge-neon','crop-zealous','lighting','mosaic','apply-canvas','edge','edge-dog','color-cube-analyze','deinterlace','wind','antialias','ripple'])
# Beispiel #9
# 0
def execute_sql(commands):  # type: (List[str]) -> None
    """Execute each SQL statement on the default connection, then commit.

    Statements run without fetching results; the transaction commits only
    after all statements succeed, and the connection is always closed
    (the original leaked it).
    """
    cnx = ut.create_connection()
    try:
        for command in commands:
            ut.execute_query(cnx, command, False)
        cnx.commit()
    finally:
        cnx.close()