Example 1
def main():
    args = parse_args()
    network_handler = getattr(importlib.import_module('utils.networks'),
                              args.network)
    output_dir = os.path.join(root_path, args.exp_path, 'network_protos')
    mkdir_if_not_exists(output_dir)
    assert (args.mode in ['train', 'test']), 'Invalid value for mode'
    assert (args.input_source in ['file',
                                  'python']), 'Invalid value for input_source'

    if args.mode == 'train' and args.input_source == 'python':
        raise ValueError("Training with 'python' as input_source is not allowed")

    param_str = {p[0]: cast_param(p[0], p[1]) for p in args.param_str}
    input_shape = [[int(x) for x in s] for s in args.input_shape]

    if args.input_source == 'file':
        network_kwargs = dict(param_str=param_str)
        output_name = os.path.splitext(os.path.basename(
            param_str['data_file']))[0] + '.prototxt'
    else:
        network_kwargs = dict(input_shape=input_shape)
        output_name = '{}_output{}.prototxt'.format(args.network,
                                                    args.num_output)

    network = network_handler(num_output=args.num_output,
                              mode=args.mode,
                              input_source=args.input_source,
                              **network_kwargs)
    network.build()
    print('Output: {}'.format(os.path.join(output_dir, output_name)))
    with open(os.path.join(output_dir, output_name), 'w') as f:
        f.write(network.to_str())
    return
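All of these snippets rely on a `mkdir_if_not_exists` helper, called either directly or as `utils.mkdir_if_not_exists`. Its implementation is never shown and surely varies across projects; a minimal sketch consistent with the call sites in this collection (several paths at once in Example 3, return value used as the created path in Example 23) might be:

import os


def mkdir_if_not_exists(*paths):
    # Hypothetical sketch: create each directory (and its parents) if missing.
    # Returning the last path matches usages such as
    # dll_working_dir = utils.mkdir_if_not_exists(...) in Example 23.
    for p in paths:
        os.makedirs(p, exist_ok=True)
    return paths[-1] if paths else None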
Example 2
    def __init__(self, config) -> None:
        super(NMTEncoderDecoderWrapper, self).__init__(config)
        self.config['metrics'] = ['dev_ppl']
        self.config.metrics.extend(self._get_metrics())

        for metric in self.config.metrics:
            self.config['best_{}_dir'.format(metric)] = os.path.join(self.config.model_dir, 'best_{}'.format(metric))
            mkdir_if_not_exists(self.config['best_{}_dir'.format(metric)])
            best_metric = 'best_{}'.format(metric)
            if best_metric not in self.config:
                self.config[best_metric] = float('inf')

        self.config['checkpoint_file'] = os.path.join(self.config.model_dir,
                                                      '{}.ckpt'.format(self._get_checkpoint_name()))

        if self.config.mode == 'train':
            self.config['num_train_steps'] = int(self.config.num_train_epochs * math.ceil(
                count_lines(self.config.train_data) / self.config.batch_size))

        self.config.vocab_file = os.path.join(self.config.model_dir,
                                              'vocab{}.in'.format(self.config.original_vocab_size
                                                                  if 'original_vocab_size' in self.config
                                                                  else self.config.vocab_size))

        if 'epoch_step' not in self.config:
            self.config['epoch_step'] = 0

        if 'epoch' not in self.config:
            self.config['epoch'] = 0

        self._vocab_table = None
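Example 2 derives `num_train_steps` from a `count_lines` helper that is not shown. A plausible sketch, assuming it simply counts the records in the training file:

def count_lines(path):
    # Hypothetical helper: number of lines in a text file.
    with open(path, 'rb') as f:
        return sum(1 for _ in f)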
Example 3
def main():
    parser = build_parser()
    args = parser.parse_args()

    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.4
    sess = tf.Session(config=config)
    train_model = Model(sess, args)

    style_image_basename = os.path.basename(args.style)
    style_image_basename = style_image_basename[:style_image_basename.find(".")]

    args.checkpoint_dir = os.path.join("./examples/checkpoint",
                                       style_image_basename)
    args.serial = os.path.join("./examples/serial", style_image_basename)

    print("[*] Checkpoint Directory: {}".format(args.checkpoint_dir))
    print("[*] Serial Directory: {}".format(args.serial))
    mkdir_if_not_exists(args.serial, args.checkpoint_dir)

    if args.continue_train:
        train_model.finetune_model(args)
    else:
        train_model.train(args)
Example 4
def init_dirs(fuzzing_dir):
    for dir in [consts.INSTANCES, consts.DLL_WORKING_DIR, consts.ENTRY_POINTS_WORKING_DIR,
                consts.FUNCTION_WORKING_DIR, consts.XREF_WORKING_DIR]:
        path = os.path.join(fuzzing_dir, dir)
        if os.path.exists(path):
            pass  # shutil.rmtree(path)
        utils.mkdir_if_not_exists(path)
Example 5
def main():
    runs = sorted(args.runs)
    for mode in ['train', 'valid', 'test']:
        fig, axarr = plt.subplots(2, 2)
        for run in runs:
            rebufs, smooths, qualities, qoes = [], [], [], []
            d = os.path.join(args.results_dir, run, mode)
            for fname in os.listdir(d):
                with open(os.path.join(d, fname, 'results.json')) as f:
                    j = json.load(f)
                rebufs.append(j['avg_rebuf_penalty'])
                smooths.append(j['avg_smoothness_penalty'])
                qualities.append(j['avg_quality_score'])
                qoes.append(j['avg_net_qoe'])

            plot_cdf(axarr[0][0], qualities, label=run)
            axarr[0][0].set_xlabel('quality score')
            plot_cdf(axarr[0][1], rebufs, label=run)
            axarr[0][1].set_xlabel('rebuf penalty')
            plot_cdf(axarr[1][0], smooths, label=run)
            axarr[1][0].set_xlabel('smooth penalty')
            plot_cdf(axarr[1][1], qoes, label=run)
            axarr[1][1].set_xlabel('net qoe')

        plt.tight_layout()
        utils.mkdir_if_not_exists(os.path.join(OUT_DIR, '_'.join(runs)))
        plt.savefig(os.path.join(OUT_DIR, '_'.join(runs), '%s.png' % mode))
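`plot_cdf` in Example 5 is not defined in the snippet; judging from calls like `plot_cdf(axarr[0][0], qualities, label=run)`, it draws an empirical CDF on the given axes. A minimal sketch under that assumption:

import numpy as np


def plot_cdf(ax, values, label=None):
    # Hypothetical helper: empirical CDF of `values` on axes `ax`.
    xs = np.sort(values)
    ys = np.arange(1, len(xs) + 1) / len(xs)
    ax.plot(xs, ys, label=label)
    ax.legend()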
Example 6
    def run(self):
        utils.mkdir_if_not_exists(os.path.dirname(self.rawfile))
        with open(self.rawfile, 'w') as f:
            # Solstice cannot handle an overly long string of angle
            # arguments, so pass them in chunks of 50 pairs
            for i in range(0, len(self.angle_pairs), 50):
                chunk = ":".join(self.angle_pairs[i:i + 50])
                cmd = f'solstice -D {chunk} -n {self.rays} -v -R {receiver} {self.geometry}'.split()
                subprocess.run(cmd, stdout=f)
Example 7
File: loop.py Project: quatrix/rekt
    def wait_for_sane_state(self):
        while True:
            work_dir = find_work_dir()

            if work_dir is None:
                self.make_rgb_white()
            else:
                self.upload_dir = os.path.join(work_dir, "to_upload")
                mkdir_if_not_exists(self.upload_dir)
                return
Example 8
    def export_obj(self):
        for pair in [self.angle_pairs[0], self.angle_pairs[-1]]:
            pair_str = pair.replace(',', '_')
            fname = f"{self.name}_{pair_str}.obj"
            utils.mkdir_if_not_exists(self.shape_dir)
            objpath = os.path.join(self.shape_dir, fname)
            cmd = f'solstice -n 100 -g format=obj -t1 -D {pair} -R {receiver} {self.geometry}'.split()
            with open(objpath, 'w') as f:
                subprocess.run(cmd, stdout=f)
Example 9
def heatmap(df, col='efficiency'):
    pic_path = os.path.join(os.getcwd(), "export", df.direction_geometry,
                            "heatmaps", df.direction_name + col)
    utils.mkdir_if_not_exists(os.path.dirname(pic_path))
    df1 = df.pivot(index='abs_y', columns='abs_x', values=col)
    fig, ax = plt.subplots(figsize=(10, 7))
    sns.heatmap(df1, cbar_kws={'label': col},
                xticklabels=10, yticklabels=10)
    plt.title(df.direction_geometry)
    plt.savefig(pic_path)
    plt.show()
Example 11
    def export_vtk(self, nrays=1000):
        for pair in [self.angle_pairs[0], self.angle_pairs[-1]]:
            pair_str = pair.replace(',', '_')
            fname = f"{self.name}_{pair_str}.vtk"
            utils.mkdir_if_not_exists(self.shape_dir)
            vtkpath = os.path.join(self.exp_dir, "shapes", fname)
            cmd = f'solstice -n {nrays} -p default -t1 -D {pair} -R {receiver} {self.geometry}'.split()
            with open(vtkpath, 'w') as f:
                subprocess.run(cmd, stdout=f)
            utils.del_first_line(vtkpath)
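Examples 11, 17, and 21 call `utils.del_first_line(vtkpath)` right after capturing Solstice's VTK output, presumably to strip a spurious leading line. A sketch of such a helper (an assumption, not the project's actual code):

def del_first_line(path):
    # Hypothetical helper: rewrite the file without its first line.
    with open(path) as f:
        lines = f.readlines()
    with open(path, 'w') as f:
        f.writelines(lines[1:])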
Example 12
def main():
    args = parse_args()
    assert (args.mode in ['gpu', 'cpu']), 'Invalid value for mode'
    if args.mode == 'gpu':
        caffe.set_mode_gpu()
        if args.gpu_id != -1:
            caffe.set_device(args.gpu_id)
    else:
        caffe.set_mode_cpu()

    test_net = os.path.join(root_path, args.test_net)
    test_net_prefix = os.path.splitext(os.path.basename(test_net))[0].replace(
        '_Test', '')
    model_dir = os.path.join(root_path, args.exp_path, 'models',
                             args.model_prefix)
    result_dir = os.path.join(root_path, args.exp_path, 'results')
    mkdir_if_not_exists(result_dir)
    if test_net_prefix == args.model_prefix:
        output_name = args.model_prefix
    else:
        output_name = '{}_{}'.format(args.model_prefix, test_net_prefix)

    info_dict = parse_model_info(model_dir)
    if args.to_clean:
        clean_useless_models(model_dir, info_dict['Best epoch'])
    model_name = 'epoch{}.caffemodel'.format(info_dict['Best epoch'])
    model = caffe.Net(test_net, os.path.join(model_dir, model_name),
                      caffe.TEST)
    r, l, p, n = run_test(model, args.output_blob, args.label_blob,
                          info_dict['AUs'])
    test_results = dict()
    test_results['row_name'] = n
    test_results['f1'] = r
    with open(os.path.join(result_dir, output_name + '.pkl'), 'w') as f_r:
        cPickle.dump(test_results, f_r)

    if args.store_preds:
        predict_dir = os.path.join(root_path, args.exp_path, 'predicts')
        mkdir_if_not_exists(predict_dir)
        f_p = h5py.File(os.path.join(predict_dir, output_name + '.h5'), 'w')
        f_p.attrs['AUs'] = n
        set_data = f_p.create_group('Test')
        set_data.create_dataset('preds',
                                p.shape,
                                dtype=np.float32,
                                compression='gzip')
        set_data['preds'][...] = p
        set_data.create_dataset('labels',
                                l.shape,
                                dtype=np.float32,
                                compression='gzip')
        set_data['labels'][...] = l
        f_p.close()
    return
Example 13
    def export_one_obj(self, angle_pair_index):
        """ Export one obj for the provided angle_pair index """
        pair = self.angle_pairs[angle_pair_index]
        pair_str = pair.replace(',', '_')
        fname = f"{self.name}_{pair_str}.obj"
        utils.mkdir_if_not_exists(self.shape_dir)
        objpath = os.path.join(self.shape_dir, fname)
        cmd = f'solstice -n 100 -g format=obj -t1 -D {pair} -R {receiver} {self.geometry_path}'.split()
        with open(objpath, 'w') as f:
            subprocess.run(cmd, stdout=f)
        return objpath
Example 14
def main():
  mkdir_if_not_exists(IMAGE_DIR)
  empty_dir(IMAGE_DIR)
  grab_area = read_grab_area()
  crop_areas = find_crop_areas()
  for _ in tqdm(range(2 ** 11)):
    ss = grab_screen(grab_area)
    draw_crop_areas(ss, crop_areas)
    for img in crop_images(ss, crop_areas):
      save_path = os.path.join(IMAGE_DIR, generate_uuid() + '.png')
      cv2.imwrite(save_path, img)
    refresh(grab_area)
    time.sleep(1)
Example 15
def plot_heatmap(trace, values='efficiency'):
    df = reader.read_mean(trace)
    pic_path = os.path.join(trace.exp_dir, "heatmaps", trace.name)
    utils.mkdir_if_not_exists(os.path.dirname(pic_path))
    df1 = df.pivot(index='abs_y', columns='abs_x', values=values)
    fig, ax = plt.subplots(figsize=(10, 7))
    sns.heatmap(df1,
                cbar_kws={'label': values},
                xticklabels=10,
                yticklabels=10)
    plt.title(trace.title)
    plt.savefig(pic_path)
    plt.show()
Example 16
def geometry_quantities(df, quantities_list):
    """ Plots list of df columns in same plot """
    pic_path = os.path.join(os.getcwd(), "export", df.direction_geometry, "plots",
                            df.direction_name, '-'.join(quantities_list))
    utils.mkdir_if_not_exists(os.path.dirname(pic_path))
    fig, ax = plt.subplots(figsize=(9, 6))
    for col in quantities_list:
        ax.plot(df[col], label=get_label(col))
    ax.set_xlabel(r"$\theta_z \quad (\degree)$")
    if contains_flux_or_losses(quantities_list):
        ax.set_ylabel("Watts")
    ax.legend()
    plt.savefig(pic_path)
    plt.show()
Example 17
def export_obj_vtk(az, zen):
    utils.mkdir_if_not_exists("out")
    pairs = f"{az:.1f},{zen:.1f}"
    objpath = os.path.join("out", pairs + ".obj")
    vtkpath = os.path.join("out", pairs + ".vtk")
    obj_cmd = f'solstice -n 1 -g format=obj -t1 -D {pairs} -R {receiver} {geometry}'.split()
    vtk_cmd = f'solstice -n 100 -p default -t1 -D {pairs} -R {receiver} {geometry}'.split()
    with open(objpath, 'w') as o:
        subprocess.run(obj_cmd, stdout=o)
    with open(vtkpath, 'w') as v:
        subprocess.run(vtk_cmd, stdout=v)
    utils.del_first_line(vtkpath)
    return objpath, vtkpath
Example 18
def text2excel(outfile, *infiles):
    '''
    infiles can be a string: 'a.xls,b.xls'          ('a.xls,b.xls', )
                  or a list: ['a.xls', 'b.xls']     (['a.xls', 'b.xls'], )
    or many positional args: 'a.xls', 'b.xls'       ('a.xls', 'b.xls')
    '''

    # wb = openpyxl.Workbook(encoding='utf8')
    wb = openpyxl.Workbook()

    if len(infiles) == 1:
        if isinstance(infiles[0], str):
            infile_list = infiles[0].split(',')
        elif isinstance(infiles[0], list):
            infile_list = infiles[0]

    elif len(infiles) >= 2:
        infile_list = list(infiles)

    else:
        exit('error infiles: {}'.format(infiles))

    for infile in infile_list:
        sheetname = get_sheetname(infile)
        print('create sheet:', sheetname)
        sheet = wb.create_sheet(title=sheetname)
        # with codecs.open(infile, mode='r', encoding='gbk', errors='ignore') as f:
        with safe_open(infile) as f:
            for n, line in enumerate(f):
                row = n + 1
                linelist = line.strip().split('\t')
                for m, value in enumerate(linelist):
                    column = m + 1
                    sheet.cell(row=row, column=column, value=value)

    # remove default sheet
    try:
        wb.remove(wb['Sheet'])
    except AttributeError:
        wb.remove_sheet(wb.get_sheet_by_name('Sheet'))  # for older openpyxl versions

    outdir = os.path.dirname(outfile)
    if outdir:
        mkdir_if_not_exists(outdir)

    wb.save(filename=outfile)

    print('write excel file:', outfile)
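`safe_open` stands in for the commented-out `codecs.open(..., encoding='gbk', errors='ignore')` call above. Assuming it keeps that tolerant behavior, a sketch:

import codecs


def safe_open(infile):
    # Hypothetical sketch mirroring the commented-out codecs.open call:
    # open text leniently, ignoring undecodable bytes.
    return codecs.open(infile, mode='r', encoding='gbk', errors='ignore')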
Example 19
def main():
    args = parse_args()
    train_name = os.path.splitext(os.path.basename(args.train_net))[0]
    assert '_Train' in train_name, \
        "Training proto must contain '_Train' in its name"
    model_prefix = train_name.replace('_Train', '')
    model_dir = os.path.join(root_path, args.exp_path, 'models', model_prefix)
    mkdir_if_not_exists(model_dir)
    output_dir = os.path.join(root_path, args.exp_path, 'solver_protos')
    mkdir_if_not_exists(output_dir)
    assert (args.mode in ['gpu', 'cpu']), 'Invalid value for mode'

    config_parser = SafeConfigParser()
    config_parser.read(os.path.join(root_path, args.config_file))
    s = caffe_pb2.SolverParameter()
    s.train_net = os.path.join(root_path, args.train_net)
    s.test_net.extend([os.path.join(root_path, x) for x in args.test_nets])
    max_iter = 10**8
    s.snapshot_prefix = os.path.join(model_dir, '')
    if args.mode == 'gpu':
        s.solver_mode = caffe_pb2.SolverParameter.GPU
    else:
        s.solver_mode = caffe_pb2.SolverParameter.CPU

    # Setting default parameters
    s.random_seed = 0xCAFFE
    s.test_interval = max_iter + 1
    s.test_iter.extend([0 for _ in xrange(len(args.test_nets))])
    s.test_initialization = True
    s.max_iter = max_iter
    s.display = 100
    s.snapshot = max_iter + 1

    # Setting parameters from the config file
    for key in config_parser.options(args.config_section):
        value = read_param(config_parser, args.config_section, key)
        setattr(s, key, value)

    output_name = model_prefix + '_{}.prototxt'.format(args.config_section)
    print('Output: {}'.format(os.path.join(output_dir, output_name)))
    with open(os.path.join(output_dir, output_name), 'w') as f:
        f.write(str(s))
    return
Example 20
def main():
    parser = build_parser()
    args = parser.parse_args()
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=config)
    train_model = Model(sess, args)

    image = Image.open(args.content_path)
    img = np.array(image)
    amount = args.noise_amount

    if args.noise == 'salt_and_pepper':
        img = util.random_noise(img, mode='s&p', amount=amount)
    elif args.noise == 'poisson':
        img = util.random_noise(img, mode='poisson')
    elif args.noise == 'gaussian':
        img = util.random_noise(img, var=amount**2, mean=0.05)
    elif args.noise == 'no':
        pass

    style_image_basename = os.path.basename(args.content_path)
    style_image_basename = style_image_basename[:style_image_basename.find(".")]

    if args.noise != 'no':
        n = (style_image_basename + "noise")
        io.imsave("./examples/content/{}.png".format(n), img)
        args.content_path = ("./examples/content/{}.png".format(n))

    args.checkpoint_dir = os.path.join("./examples/checkpoint",
                                       style_image_basename)
    args.serial = os.path.join("./examples/serial", style_image_basename)

    print("[*] Checkpoint Directory: {}".format(args.checkpoint_dir))
    print("[*] Serial Directory: {}".format(args.serial))
    mkdir_if_not_exists(args.serial, args.checkpoint_dir)

    if args.continue_train:
        train_model.finetune_model(args)
    else:
        train_model.train(args)
Example 21
    def export_obj_vtk(self, angle_pair_index, nrays=100):
        """ Export one obj and vtk for the provided angle_pair index;
        returns paths to be used in pyvista """
        pair = self.angle_pairs[angle_pair_index]
        pair_str = pair.replace(',', '_')
        fname = f"{self.name}_{pair_str}"
        utils.mkdir_if_not_exists(self.shape_dir)
        objpath = os.path.join(self.shape_dir, fname + ".obj")
        vtkpath = os.path.join(self.shape_dir, fname + ".vtk")
        obj_cmd = f'solstice -n 100 -g format=obj -t1 -D {pair} -R {receiver} {self.geometry_path}'.split()
        vtk_cmd = f'solstice -n {nrays} -p default -t1 -D {pair} -R {receiver} {self.geometry_path}'.split()
        with open(objpath, 'w') as o:
            subprocess.run(obj_cmd, stdout=o)
        with open(vtkpath, 'w') as v:
            subprocess.run(vtk_cmd, stdout=v)
        utils.del_first_line(vtkpath)
        return objpath, vtkpath
Example 22
    def link_data(source, dest):

        if isinstance(source, list):
            sources = source
        elif '*' in source:
            sources = glob.glob(source)
        else:
            sources = [source]
        # print sources

        if not sources:
            print('[warn] no data from: {}'.format(source))

        for s in sources:
            if not os.path.exists(s):
                print('[warn] file does not exist: {}'.format(s))

            else:
                dest_dir = os.path.dirname(dest)
                utils.mkdir_if_not_exists(dest_dir)

                cmd = 'ln -sf {} {}'.format(s, dest)

                os.system(cmd)
Example 23
def hierarchical_diagnosis(program, fuzzing_dir, is_continuous):
    """
    diagnose the program in few hierarchical steps:
    1) dll diagnoses
    1.1*) dll entry points diagnoses
    2) function diagnoses
    3) xref diagnoses
    :param program: program to diagnose
    :param fuzzing_dir: working dir
    :param is_continuous: whether to use known bugs or use the bugs from previous step
    :return:
    """
    init_dirs(fuzzing_dir)
    seedfiles_dir = os.path.join(fuzzing_dir, consts.SEEDFILES)
    exploit_dir = os.path.join(fuzzing_dir, consts.EXPLOIT_DIR)
    utils.copy_files_to_dir(exploit_dir, seedfiles_dir)
    instances_dir = os.path.join(fuzzing_dir, consts.INSTANCES)
    config = yaml.safe_load(open(os.path.join(fuzzing_dir, "config.yaml")))
    dll_working_dir = utils.mkdir_if_not_exists(os.path.join(fuzzing_dir, consts.DLL_WORKING_DIR))
    dll_matrix_file = os.path.join(fuzzing_dir, consts.DLL_MATRIX)
    function_matrix_file = os.path.join(fuzzing_dir, consts.FUNCTION_MATRIX)
    dominator_matrix_file = os.path.join(fuzzing_dir, consts.DOMINATOR_MATRIX)
    # entry_points_file = os.path.join(fuzzing_dir, consts.ENTRY_POINTS_MATRIX)
    utils.copy_files_to_dir(seedfiles_dir, instances_dir)
    utils.copy_files_to_dir(consts.EXAMPLES_DIR, instances_dir)
    fuzz_project_dir(seedfiles_dir, instances_dir, consts.FUZZ_ITERATIONS)

    # dll diagnoses
    run_debugger_on_files(program, utils.get_files_in_dir(instances_dir), dll_working_dir, config, DLL_GRANULARITY,
                          None, None)
    diagnoser.campaign_matrix.create_matrix_for_dir(dll_working_dir, os.path.join(fuzzing_dir, consts.DLL_DIAGNOSIS_RESULT),
                                                    dll_matrix_file)
    dll_instance = readPlanningFile(dll_matrix_file)
    dll_instance.diagnose()
    # # #
    # # # # # entry points diagnoses
    # # # # named_diagnoses = filter(lambda diag: diag.probability > consts.DLL_DIAGNOSIS_THRESHOLD or True,
    # # # #                          dll_instance.get_named_diagnoses())
    # # # # entry_points_working_dir = utils.mkdir_if_not_exists(os.path.join(fuzzing_dir, consts.ENTRY_POINTS_WORKING_DIR))
    # # # # run_debugger_on_files(program, utils.get_files_in_dir(instances_dir), entry_points_working_dir, config,
    # # # #                       ENTRY_POINTS_GRANULARITY,
    # # # #                       get_binaries_to_diagnose(named_diagnoses, config), None)
    # # # # diagnoser.campaign_matrix.create_matrix_for_dir(entry_points_working_dir,
    # # # #                                                 os.path.join(fuzzing_dir, consts.ENTRY_POINTS_DIAGNOSIS_RESULT),
    # # # #                                                 entry_points_file)
    # # # # entry_points_instance = readPlanningFile(entry_points_file)
    # # # # entry_points_instance.diagnose()
    # # #
    # # # # function diagnosis
    named_diagnoses = [diag for diag in dll_instance.get_named_diagnoses()
                       if diag.probability > consts.DLL_DIAGNOSIS_THRESHOLD]
    binaries_to_diagnose = get_binaries_to_diagnose(named_diagnoses, config)
    function_working_dir = utils.mkdir_if_not_exists(os.path.join(fuzzing_dir, consts.FUNCTION_WORKING_DIR))
    run_debugger_on_files(program, utils.get_files_in_dir(instances_dir), function_working_dir, config, FUNCTION_GRANULARITY,
                          binaries_to_diagnose, None)
    diagnoser.campaign_matrix.create_matrix_for_dir(function_working_dir, os.path.join(fuzzing_dir,
                                                                                       consts.FUNCTION_DIAGNOSIS_RESULT),
                                                    function_matrix_file)

    function_instance = readPlanningFile(function_matrix_file)
    function_instance.diagnose()

    # dominators diagnosis
    diagnosed_components = [name for name, _ in function_instance.get_components_probabilities_by_name()
                            if '&' in name and "crt" not in name and "sub_" not in name and "asan" not in name]
    tracing_data = {}
    for comp in diagnosed_components:
        dll = comp.split('#')[1]
        address = comp.split('&')[0]
        tracing_data.setdefault(dll, []).append(address)
    dominator_working_dir = utils.mkdir_if_not_exists(os.path.join(fuzzing_dir, consts.DOMINATOR_WORKING_DIR))
    run_debugger_on_files(program, utils.get_files_in_dir(instances_dir), dominator_working_dir, config,
                          DOMINATOR_GRANULARITY, binaries_to_diagnose, tracing_data)
    diagnoser.campaign_matrix.create_matrix_for_dir(dominator_working_dir,
                                                    os.path.join(fuzzing_dir, consts.FUNCTION_DIAGNOSIS_RESULT),
                                                    dominator_matrix_file)
    dominator_instance = readPlanningFile(dominator_matrix_file)
    dominator_instance.diagnose()

    # xref diagnosis
    # keep the 20 most probable '&'-components
    ranked = sorted(dominator_instance.get_components_probabilities_by_name(),
                    key=lambda x: x[1], reverse=True)
    diagnosed_components = [name for name, _ in ranked if '&' in name][:20]
    tracing_data = {}
    for comp in diagnosed_components:
        address, function_dll = comp.split('&')
        print(function_dll)
        tracing_data.setdefault(function_dll, []).extend(address.split("+"))
    xref_working_dir = utils.mkdir_if_not_exists(os.path.join(fuzzing_dir, consts.XREF_WORKING_DIR))
    run_debugger_on_files(program, utils.get_files_in_dir(instances_dir), xref_working_dir, config, XREF_GRANULARITY, binaries_to_diagnose, tracing_data)
    diagnoser.campaign_matrix.create_matrix_for_dir(xref_working_dir,
                                                    os.path.join(fuzzing_dir, consts.FUNCTION_DIAGNOSIS_RESULT),
                                                    os.path.join(fuzzing_dir, consts.XREF_MATRIX))
Example 24
def main():
  vectors = get_vectors(IMAGE_DIR)
  print('Writing to a tsv file...')
  mkdir_if_not_exists(TENSORBORAD_DIR)
  pd.DataFrame(vectors).to_csv(f'{TENSORBORAD_DIR}/vectors_all.tsv', sep='\t', index=False, header=False)
Example 25
    def run(self):
        mkdir_if_not_exists(self.output_dir)
        for file_url in self.file_urls:
            download(file_url, self.output_dir)
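The `download` helper in Example 25 is not shown; a minimal sketch, assuming it fetches the URL into the directory under the remote basename:

import os
import urllib.request


def download(url, output_dir):
    # Hypothetical helper: save `url` into `output_dir`, keeping its basename.
    dest = os.path.join(output_dir, os.path.basename(url))
    urllib.request.urlretrieve(url, dest)
    return dest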
Example 26
def main(argv):

    args, remaining_args = parse_args(argv[1:])
    vid = video.Video(args.video)

    if args.max_chunks is not None:
        vid.set_max_chunks(args.max_chunks)

    pqs = vid.get_bitrates()
    max_pq = max(pqs)
    pqs = [100 * float(q) / max_pq for q in pqs]
    # print("pqs: {}".format(pqs))

    pq_dict = {}
    for br, pq in zip(vid.get_bitrates(), pqs):
        pq_dict[br] = pq

    obj = objective.Objective(pq_dict, args.startup_penalty,
                              args.rebuffer_penalty, args.smooth_penalty)
    net = network.Network(args.mm_trace, args.mm_start_idx)
    env = Env(vid, obj, net)
    obj_client = copy.deepcopy(obj)
    vid_client = copy.deepcopy(vid)

    if args.instructor_mpc:
        from mpc.mpc import AbrAlg
        abr_alg_fn = AbrAlg
    elif args.instructor_bb:
        from bb.bb import AbrAlg
        abr_alg_fn = AbrAlg
    elif args.rb:
        from your_code.rb import AbrAlg
        abr_alg_fn = AbrAlg
    else:
        abr_alg_fn = abr.AbrAlg

    abr_alg = abr_alg_fn(vid_client, obj_client, remaining_args)

    total_rebuf_sec = 0
    rebuf_sec = None
    prev_chunk_rate = None
    buff = env.get_buffer_size()
    buff_lens = [buff]
    ttds = []

    for i in range(vid.num_max_chunks()):
        feedback = {
            "chunk_index": i,
            "rebuffer_sec": rebuf_sec,
            "download_rate_kbps": prev_chunk_rate,
            "buffer_sec": buff,
        }

        quality = abr_alg.next_quality(**feedback)

        ttd, rebuf_sec, smooth_pen, prev_chunk_rate = env.step(quality)

        buff = env.get_buffer_size()

        if i > 0:
            total_rebuf_sec += rebuf_sec

        buff_lens.append(buff)
        ttds.append(ttd)

    tot_qoe = env.get_total_qoe()
    avg_qoe = tot_qoe / vid.num_max_chunks()

    print('Avg QoE: ', avg_qoe)
    print('Total Rebuf (sec): ', total_rebuf_sec)
    print('Avg Down Rate (Mbps): %.2f' % (env.get_avg_down_rate_kbps() / KILO))

    for i, l in enumerate(zip(*env.get_avg_qoe_breakdown(4))):
        print(
            '%d/%d part Qoe-Qual: %.2f, rp: %.2f, sp: %.2f, total-qoe: %.2f' %
            (i, 4, l[0], l[1], l[2], l[3]))

    for l in zip(*env.get_avg_qoe_breakdown(1)):
        print(
            '%d/%d part Qoe-Qual: %.2f, rp: %.2f, sp: %.2f, total-qoe: %.2f' %
            (0, 1, l[0], l[1], l[2], l[3]))

    # print('Avg network throughput (Mbps): %.2f'% network.avg_throughput_Mbps_time(args.mm_trace, vid.num_max_chunks()* 4.0))
    if args.results_dir is not None:
        utils.mkdir_if_not_exists(args.results_dir)
        env.log_qoe(os.path.join(args.results_dir, 'qoe.txt'))

        with open(os.path.join(args.results_dir, 'results.json'), 'w') as f:
            (qual, ), (rp, ), (sp, ), (qoe, ) = env.get_avg_qoe_breakdown(1)
            json.dump(dict(avg_quality_score=qual,
                           avg_rebuf_penalty=rp,
                           avg_smoothness_penalty=sp,
                           avg_net_qoe=qoe,
                           avg_download_rate=env.get_avg_down_rate_kbps()),
                      f,
                      indent=4,
                      sort_keys=True)
        generate_plts(args.results_dir, env.get_bitrates(), env.get_qoes(),
                      buff_lens, ttds, args.mm_trace, args.mm_start_idx)
Example 27
    parser.add_argument(
        "posts",
        type=str,
        default="100",
        help="number of posts; allows generating datasets of different cardinality",
    )
    parser.add_argument(
        "-s",
        "--start-date",
        type=str,
        default="20200101",
        help="Start date for events, optional. Format YYYYMMDD. The min start date is 20170101",
    )
    parser.add_argument(
        "-e",
        "--end-date",
        type=str,
        default="20201231",
        help="End date for events, optional. Format YYYYMMDD. The max end date is 20201103",
    )
    args = parser.parse_args()

    num_rows = str_human_to_number(args.posts)
    print(f'Generating dataset for {args.posts} posts')
    df = create_df(num_rows, start=args.start_date, end=args.end_date)
    mkdir_if_not_exists('datasets')
    filename = f"datasets/posts_{number_to_human_str(num_rows)}.csv"
    print(f'Saving file to {filename}')
    df.to_csv(filename, index=False)
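`str_human_to_number` and its inverse `number_to_human_str` are assumed to convert between strings like '10k' and integers. A hedged sketch of the parsing direction:

def str_human_to_number(s):
    # Hypothetical helper: parse '100', '10k', '2m' into an int.
    s = s.strip().lower()
    factors = {'k': 10 ** 3, 'm': 10 ** 6}
    if s and s[-1] in factors:
        return int(float(s[:-1]) * factors[s[-1]])
    return int(s)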