Example no. 1
def main():
    ####################################################################################################################
    # Setup
    # Get flag values
    embeddings = task_utils.get_embeddings()
    folder_results = FLAGS.out
    assert len(folder_results) > 0, \
        "Please specify a path to the results folder using --out"
    folder_data = FLAGS.input_data
    dense_layer_size = FLAGS.dense_layer
    print_summary = FLAGS.print_summary
    num_epochs = FLAGS.num_epochs
    batch_size = FLAGS.batch_size
    train_samples = FLAGS.train_samples

    # Acquire data
    if not os.path.exists(os.path.join(folder_data, 'ir_train')):
        # Download data
        task_utils.download_and_unzip(
            'https://polybox.ethz.ch/index.php/s/JOBjrfmAjOeWCyl/download',
            'classifyapp_training_data', folder_data)

    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_train'))
    assert os.path.exists(os.path.join(folder_data, 'ir_val')), \
        "Folder not found: " + folder_data + '/ir_val'
    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_val'))
    assert os.path.exists(os.path.join(folder_data, 'ir_test')), \
        "Folder not found: " + folder_data + '/ir_test'
    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_test'))

    # Create directories if they do not exist
    if not os.path.exists(folder_results):
        os.makedirs(folder_results)
    if not os.path.exists(os.path.join(folder_results, "models")):
        os.makedirs(os.path.join(folder_results, "models"))
    if not os.path.exists(os.path.join(folder_results, "predictions")):
        os.makedirs(os.path.join(folder_results, "predictions"))

    ####################################################################################################################
    # Train model
    # Evaluate Classifyapp
    print("\nEvaluating ClassifyappInst2Vec ...")
    if not FLAGS.inference:
        classifyapp_accuracy = evaluate(NCC_classifyapp(), embeddings,
                                        folder_data, train_samples,
                                        folder_results, dense_layer_size,
                                        print_summary, num_epochs, batch_size)

        ####################################################################################################################
        # Print results
        print('\nTest accuracy:',
              sum(classifyapp_accuracy) * 100 / len(classifyapp_accuracy), '%')

    else:
        test_accuracy(NCC_classifyapp(), embeddings, folder_data,
                      train_samples, folder_results, dense_layer_size,
                      print_summary, num_epochs, batch_size)
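
This and the following examples read their configuration from FLAGS, which points at an absl-style flag setup. Below is a minimal sketch of how those flags might be declared; the flag names mirror the FLAGS.* attributes used in these examples, but the defaults, help strings, and the tiny main stub are assumptions for illustration, not code from the original scripts.

from absl import app, flags

FLAGS = flags.FLAGS

# Assumed flag definitions matching the FLAGS.* attributes referenced above.
# Defaults and help texts are placeholders.
flags.DEFINE_string('out', '', 'Path to the results folder')
flags.DEFINE_string('input_data', 'data', 'Path to the input data folder')
flags.DEFINE_integer('dense_layer', 32, 'Size of the dense layer')
flags.DEFINE_bool('print_summary', False, 'Print the model summary')
flags.DEFINE_integer('num_epochs', 50, 'Number of training epochs')
flags.DEFINE_integer('batch_size', 64, 'Training batch size')
flags.DEFINE_integer('train_samples', 1500, 'Number of training samples per class')
flags.DEFINE_bool('inference', False, 'Run inference only instead of training')


def main(argv):
    del argv  # unused
    print('Results folder:', FLAGS.out)


if __name__ == '__main__':
    app.run(main)
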
Example no. 2
def main(argv):
    del argv  # unused

    ####################################################################################################################
    # Setup
    # Get flag values
    folder_results = FLAGS.out
    assert len(folder_results) > 0, \
        "Please specify a path to the results folder using --out"
    folder_data = FLAGS.input_data
    print_summary = FLAGS.print_summary
    num_epochs = FLAGS.num_epochs
    train_samples = FLAGS.train_samples

    # Acquire data
    if not os.path.exists(os.path.join(folder_data, 'ir_train')):
        # Download data
        task_utils.download_and_unzip(
            'https://polybox.ethz.ch/index.php/s/JOBjrfmAjOeWCyl/download',
            'classifyapp_training_data', folder_data)

    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_train'))
    assert os.path.exists(os.path.join(folder_data, 'ir_val')), \
        "Folder not found: " + folder_data + '/ir_val'
    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_val'))
    assert os.path.exists(os.path.join(folder_data, 'ir_test')), \
        "Folder not found: " + folder_data + '/ir_test'
    task_utils.llvm_ir_to_trainable(os.path.join(folder_data, 'ir_test'))

    # Create directories if they do not exist
    if not os.path.exists(folder_results):
        os.makedirs(folder_results)
    if not os.path.exists(os.path.join(folder_results, "models")):
        os.makedirs(os.path.join(folder_results, "models"))
    if not os.path.exists(os.path.join(folder_results, "predictions")):
        os.makedirs(os.path.join(folder_results, "predictions"))

    ####################################################################################################################
    # Train model
    # Evaluate Classifyapp
    print("\nEvaluating ClassifyappInst2Vec ...")
    evaluate(Engine(FLAGS), folder_data, train_samples, folder_results,
             print_summary, num_epochs)
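
The directory setup in Examples no. 1 and no. 2 guards every os.makedirs call with an os.path.exists check. Since Python 3.2, os.makedirs(..., exist_ok=True) gives the same result without the check and without the race between testing and creating; a short sketch (the path is only illustrative):

import os


def ensure_results_dirs(folder_results):
    # Race-free equivalent of the exists-then-makedirs pattern above.
    for sub in ('', 'models', 'predictions'):
        os.makedirs(os.path.join(folder_results, sub), exist_ok=True)


ensure_results_dirs('results')  # illustrative path
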
Example no. 3
import struct
import pickle
import os
import re
import wget
import zipfile
import rgx_utils as rgx
import task_utils as utils

utils.download_and_unzip(
    'https://samate.nist.gov/SRD/testsuites/juliet/Juliet_Test_Suite_v1.3_for_C_Cpp.zip',
    'Juliet Test Suite Dataset', 'data')
try:
    os.makedirs('pretrained')
except FileExistsError:
    pass

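Example no. 3 calls utils.download_and_unzip (imported from task_utils) right after importing wget and zipfile, which hints at how the helper is put together. The function below is a hypothetical reimplementation based only on those imports and on how the helper is called in these examples; it is not the actual task_utils code.

import os
import zipfile

import wget


def download_and_unzip(url, dataset_name, data_folder):
    # Hypothetical helper: fetch a zip archive and extract it into data_folder.
    os.makedirs(data_folder, exist_ok=True)
    print('Downloading', dataset_name, '...')
    archive = wget.download(url, out=data_folder)  # returns the downloaded file path
    with zipfile.ZipFile(archive, 'r') as zf:
        zf.extractall(data_folder)
    return data_folder
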
Example no. 4
def main(argv):
    del argv  # unused

    ####################################################################################################################
    # Setup
    # Get flag values
    embeddings = task_utils.get_embeddings()
    input_data = FLAGS.input_data
    out = FLAGS.out
    if not os.path.exists(out):
        os.makedirs(out)
    device = FLAGS.device
    assert device in ["all", "Cypress", "Tahiti", "Fermi", "Kepler"], \
        'Choose device among: all, Cypress, Tahiti, Fermi, Kepler'
    dense_layer_size = FLAGS.dense_layer
    print_summary = FLAGS.print_summary
    num_epochs = FLAGS.num_epochs
    batch_size = FLAGS.batch_size
    if not os.path.exists(os.path.join(input_data, 'kernels_ir')):
        # Download data
        task_utils.download_and_unzip(
            'http://spclstorage.inf.ethz.ch/projects/ncc/tasks/threadcoarsening_data.zip',
            'threadcoarsening_training_data', input_data)

    task_utils.llvm_ir_to_trainable(os.path.join(input_data, 'kernels_ir'))

    ####################################################################################################################
    # Reference values
    # Values copied from papers and github
    magni_pl_sp_vals = [1.21, 1.01, 0.86, 0.94]
    magni_sp_mean = 1.005
    deeptune_pl_sp_vals = [1.10, 1.05, 1.10, 0.99]
    deeptune_sp_mean = 1.06
    deeptuneTL_pl_sp_vals = [1.17, 1.23, 1.14, 0.93]
    deeptuneTL_sp_mean = 1.1175

    ####################################################################################################################
    # Train model
    # Evaluate NCC_threadcoarsening
    print("\nEvaluating NCC_threadcoarsening ...")
    ncc_threadcoarsening = evaluate(NCC_threadcoarsening(), device, input_data,
                                    out, embeddings, dense_layer_size,
                                    print_summary, num_epochs, batch_size)

    ####################################################################################################################
    # Print results
    print(
        '\n',
        ncc_threadcoarsening.groupby('Platform')[['Speedup', 'Oracle']].mean())
    d = np.array([ncc_threadcoarsening[['Speedup', 'Oracle']].mean()]).T
    print(
        '\n',
        pd.DataFrame(d,
                     columns=["DeepTuneInst2Vec"],
                     index=["Speedup", "Oracle"]))

    # Model comparison: speedups
    print('\nModel comparison: speedups')
    d = list()
    d.append(np.append(magni_pl_sp_vals, magni_sp_mean))
    d.append(np.append(deeptune_pl_sp_vals, deeptune_sp_mean))
    d.append(np.append(deeptuneTL_pl_sp_vals, deeptuneTL_sp_mean))
    d.append(
        np.append(
            ncc_threadcoarsening.groupby(['Platform'])['Speedup'].mean().values,
            ncc_threadcoarsening['Speedup'].mean()))
    if FLAGS.device == 'all':
        d = np.array(d).T.reshape(5, 4)
        devs = [
            'AMD Radeon HD 5900', 'AMD Tahiti 7970', 'NVIDIA GTX 480',
            'NVIDIA Tesla K20c', 'Average'
        ]
    else:
        d = np.array(d).T.reshape(1, 4)
        devs = [_FLAG_TO_DEVICE_NAME[FLAGS.device]]
    print(
        '\n',
        pd.DataFrame(d,
                     columns=[
                         'Magni et al.', 'DeepTune', 'DeepTuneTL',
                         'DeepTuneInst2Vec'
                     ],
                     index=devs))
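
The per-platform summaries in this example and in Example no. 5 select several columns after a groupby; recent pandas releases require that selection to be a list of column names (double brackets), which is the form used in the calls above. A self-contained sketch with placeholder numbers shows the pattern:

import pandas as pd

# Toy results frame standing in for ncc_threadcoarsening / ncc_devmap;
# the numbers are placeholders, not measured results.
df = pd.DataFrame({
    'Platform': ['Cypress', 'Cypress', 'Tahiti', 'Tahiti'],
    'Speedup': [1.10, 1.30, 0.95, 1.05],
    'Oracle': [1.40, 1.50, 1.20, 1.25],
})

# Select the numeric columns with a list, then aggregate per platform.
print(df.groupby('Platform')[['Speedup', 'Oracle']].mean())
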
Example no. 5
def main(argv):
    del argv  # unused

    ####################################################################################################################
    # Setup
    # Get flag values
    embeddings = task_utils.get_embeddings()
    out = FLAGS.out
    if not os.path.exists(out):
        os.makedirs(out)
    device = FLAGS.device
    assert device in ['all', 'amd', 'nvidia'], \
        'Choose device among: all, amd, nvidia'
    dense_layer_size = FLAGS.dense_layer
    print_summary = FLAGS.print_summary
    num_epochs = FLAGS.num_epochs
    batch_size = FLAGS.batch_size
    input_data = FLAGS.input_data
    if not os.path.exists(os.path.join(input_data, 'kernels_ir')):
        # Download data
        task_utils.download_and_unzip(
            'https://polybox.ethz.ch/index.php/s/U08Z3xLhvbLk8io/download',
            'devmap_training_data', input_data)

    task_utils.llvm_ir_to_trainable(os.path.join(input_data, 'kernels_ir'))

    ####################################################################################################################
    # Reference values
    # Values copied from:
    # https://github.com/ChrisCummins/paper-end2end-dl/blob/master/code/Case%20Study%20A.ipynb
    static_pred_vals = [58.823529, 56.911765]
    static_pred_mean = 57.867647
    static_sp_vals = [1.0, 1.0]
    static_sp_mean = 1.0
    grewe_pred_vals = [73.382353, 72.941176]
    grewe_pred_mean = 73.161765
    grewe_sp_vals = [2.905822, 1.264801]
    grewe_sp_mean = 2.085312
    deeptune_pred_vals = [83.676471, 80.294118]
    deeptune_pred_mean = 81.985294
    deeptune_sp_vals = [3.335612, 1.412222]
    deeptune_sp_mean = 2.373917

    ####################################################################################################################
    # Train model
    print("Evaluating DeepTuneInst2Vec ...")
    ncc_devmap = evaluate(NCC_devmap(), device, input_data, out, embeddings,
                          dense_layer_size, print_summary, num_epochs,
                          batch_size)

    ####################################################################################################################
    # Print results
    print('\n--- Prediction results')
    print(
        ncc_devmap.groupby(['Platform', 'Benchmark Suite'])[['Correct?',
                                                             'Speedup']].mean())
    print('\n--- Prediction results (summarized)')
    print(
        ncc_devmap.groupby(['Platform'])[['Correct?', 'Speedup']].mean())

    # Model comparison: prediction accuracy
    print('\n--- Model comparison: prediction accuracy')
    d = list()
    d.append(np.append(static_pred_vals, static_pred_mean))
    d.append(np.append(grewe_pred_vals, grewe_pred_mean))
    d.append(np.append(deeptune_pred_vals, deeptune_pred_mean))
    d.append(
        np.append(
            ncc_devmap.groupby(['Platform'])['Correct?'].mean().values * 100,
            ncc_devmap['Correct?'].mean() * 100))
    d = np.array(d).T.reshape(3, 4)
    print(
        '\n',
        pd.DataFrame(d,
                     columns=[
                         'Static mapping', 'Grewe et al.', 'DeepTune',
                         'DeepTuneInst2Vec'
                     ],
                     index=['AMD Tahiti 7970', 'NVIDIA GTX 970', 'Average']))

    # Model comparison: speedups
    print('\n--- Model comparison: speedups')
    d = list()
    d.append(np.append(static_sp_vals, static_sp_mean))
    d.append(np.append(grewe_sp_vals, grewe_sp_mean))
    d.append(np.append(deeptune_sp_vals, deeptune_sp_mean))
    d.append(
        np.append(
            ncc_devmap.groupby(['Platform'])['Speedup'].mean().values,
            ncc_devmap['Speedup'].mean()))
    d = np.array(d).T.reshape(3, 4)
    print(
        '\n',
        pd.DataFrame(d,
                     columns=[
                         'Static mapping', 'Grewe et al.', 'DeepTune',
                         'DeepTuneInst2Vec'
                     ],
                     index=['AMD Tahiti 7970', 'NVIDIA GTX 970', 'Average']))
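
The comparison tables in Examples no. 4 and no. 5 are built by collecting, per model, the per-platform values followed by their mean, stacking the lists, transposing so platforms become rows, and labelling the result with pd.DataFrame. The sketch below replays that shape manipulation with placeholder numbers; the model and platform names are illustrative only.

import numpy as np
import pandas as pd

# One entry per model: per-platform values followed by their mean (placeholders).
rows = [
    np.append([1.0, 1.0], 1.0),   # e.g. a static baseline
    np.append([2.9, 1.3], 2.1),   # e.g. a heuristic model
    np.append([3.3, 1.4], 2.4),   # e.g. a learned model
]

# Stack to (n_models, n_platforms + 1), then transpose so platforms become rows.
table = np.array(rows).T  # shape: (3, 3)
print(pd.DataFrame(table,
                   columns=['Baseline', 'Model A', 'Model B'],
                   index=['Platform 1', 'Platform 2', 'Average']))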