Example #1
def multiprocess(traces, commands, raw_config):
    # Get Simulator Configurations...
    config = SimulatorConfig(
        C=raw_config['C'],
        L=raw_config['L'],
        K=raw_config['K'],
        N=raw_config['N'],
        BIT_SIZE=constants.BIT_SIZE,
        input_label=commands.input_file_label,
    )
    simulation_results = run(traces, config)

    # Open CSV file to write...
    output_file = constants.OUTPUT_FOLDER_PATH + populate_output_file_label(
        commands.input_file_label,
        C=raw_config['C'],
        L=raw_config['L'],
        K=raw_config['K'],
        N=raw_config['N'],
    )
    with open(output_file, 'w+') as csv_file:
        # Write the results out as a CSV file
        csv_manager = CsvManager(csv_file, [
            'Input',
            'Cache-Capacity',
            'L',
            'K',
            'N',
            'Hit-Ratio',
            'Miss-Ratio',
            'AMAT',
            'Hit-Count',
            'Miss-Count',
            'Access-Count',
        ])
        csv_manager.write_row(simulation_results)
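
All three examples write their output through a CsvManager helper whose definition is not included on this page. A minimal sketch, assuming it is a thin wrapper around csv.DictWriter that writes the header row on construction (the real class is project-specific), could look like this:

import csv


class CsvManager:
  # Hypothetical sketch of the helper used in these examples: the
  # constructor takes an open file object plus the column names and writes
  # the header; write_row() takes a dict keyed by those columns.
  def __init__(self, csv_file, fieldnames):
    self._writer = csv.DictWriter(csv_file, fieldnames=list(fieldnames))
    self._writer.writeheader()

  def write_row(self, row):
    self._writer.writerow(row)
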
Example #2
import sys
from optparse import OptionParser


def parse_commands(argv):
  parser = OptionParser()
  parser.add_option('-i', '--inputFile', dest='input_file')
  parser.add_option('-o', '--outputFile', dest='output_file')
  
  options, otherjunk = parser.parse_args(argv)
  return options

options = parse_commands(sys.argv[1:])

parser = None
weka_objects = None
with open(options.input_file, 'r') as weka_file:
  parser = WekaParser(weka_file)
  weka_objects = parser.parse()

weka_manager = WekaManager(weka_objects)
weka_manager.filter_objects()
weka_manager.normalize()

analyze_result = weka_manager.analyze()

with open(options.output_file, 'w+') as csv_file:
  csv_manager = CsvManager(csv_file, ['Type', 'Source', 'Target', 'Weight'])
  for relationship, weight in analyze_result.items():
    csv_manager.write_row({
      'Type': 'Undirected',
      'Source': relationship[0],
      'Target': relationship[1],
      'Weight': weight,
    })
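
The script parses a Weka file, filters and normalizes the parsed objects, and writes each analyzed relationship as one edge row. Assuming the snippet is saved as weka_analysis.py (file names here are hypothetical), it could be invoked like this:

python weka_analysis.py --inputFile input.arff --outputFile edges.csv

The resulting columns (Type, Source, Target, Weight) form an undirected edge list that graph tools such as Gephi can import directly.
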
Example #3
def run(commands):
  input_file = constants.INPUT_FOLDER_PATH + commands.input_file_label
  # Parse the trace file into trace records.
  traces = []
  with open(input_file, 'r') as trace_file:
    traces = trace_parser.parse(trace_file, constants.BIT_SIZE)

  # Config for L1 I/D, L2 (Fixed)
  config_L1_inst = CacheConfig(
    C=L1_CACHE_SIZE, L=BLOCK_SIZE, K=1, N=512,
    BIT_SIZE=constants.BIT_SIZE,
    input_label=commands.input_file_label,
    HIT_TIME=4,
    MISS_PENALTY=16,
  )
  config_L1_data = CacheConfig(
    C=L1_CACHE_SIZE, L=BLOCK_SIZE, K=1, N=512,
    BIT_SIZE=constants.BIT_SIZE,
    input_label=commands.input_file_label,
    HIT_TIME=4,
    MISS_PENALTY=16,
  )
  config_L2 = CacheConfig(
    C=L2_CACHE_SIZE, L=BLOCK_SIZE, K=8, N=512,
    BIT_SIZE=constants.BIT_SIZE,
    input_label=commands.input_file_label,
    HIT_TIME=16,
    MISS_PENALTY=32,
  )

  raw_configs_dicts_L3 = {}
  with open('configs/project.json', 'r') as raw_config_file:
    raw_configs_dicts_L3 = json.load(raw_config_file)
  raw_configs_L3 = [
    {
      'C': L3_CACHE_SIZE,
      'L': BLOCK_SIZE,
      'K': raw_config['K'],
      'N': raw_config['N'],
      'INST_PREFETCHER': raw_config['INST_PREFETCHER'],
      'DATA_PREFETCHER': raw_config['DATA_PREFETCHER'],
      'REPLACEMENT': raw_config['REPLACEMENT'],
    }
    for raw_config in cartesian_dict_product(raw_configs_dicts_L3)
    if check_raw_config({
      'C': L3_CACHE_SIZE,
      'L': BLOCK_SIZE,
      'K': raw_config['K'],
      'N': raw_config['N'],
    })
  ]
  validate_raw_configs(raw_configs_L3)
  del raw_configs_dicts_L3

  for raw_config_L3 in raw_configs_L3:
    # Config for L3 (Dynamic)
    config_L3 = CacheConfig(
      C=raw_config_L3['C'],
      L=raw_config_L3['L'],
      K=raw_config_L3['K'],
      N=raw_config_L3['N'],
      BIT_SIZE=constants.BIT_SIZE,
      input_label=commands.input_file_label,
      HIT_TIME=32,
      MISS_PENALTY=120,
      # inst_prefetcher=constants.PREFETCHER_TYPE['STREAM_BUFFER'],
      inst_prefetcher=constants.PREFETCHER_TYPE[raw_config_L3['INST_PREFETCHER']],
      # data_prefetcher=constants.PREFETCHER_TYPE['WRITE_BUFFER'],
      data_prefetcher=constants.PREFETCHER_TYPE[raw_config_L3['DATA_PREFETCHER']],
      replacement_policy=constants.REPLACEMENT_POLICY_TYPE[raw_config_L3['REPLACEMENT']],
    )

    # TODO(totorody): Implement running the caches
    cache_L1_inst = Cache(config_L1_inst)
    cache_L1_data = Cache(config_L1_data)
    cache_L2 = Cache(config_L2)
    cache_L3 = Cache(config_L3)

    cache_L1_inst.set_low_cache(cache_L2)
    cache_L1_data.set_low_cache(cache_L2)
    cache_L2.set_low_cache(cache_L3)

    print('Starting cache simulation...')
    for index, trace in enumerate(traces):
      if index % 10000 == 0:
        print('trace #:', index)
      if trace['type'] not in constants.ACCESS_TYPE.values():
        continue
      if trace['type'] == constants.ACCESS_TYPE['INST_READ']:
        cache_L1_inst.access(trace)
      else:
        cache_L1_data.access(trace)

    print('Writing cache simulation results...')
    inst_result = cache_L1_inst.get_result('L1-Inst')
    data_result = cache_L1_data.get_result('L1-Data')
    L2_result = cache_L2.get_result('L2')
    L3_result = cache_L3.get_result('L3')

    output_file = constants.OUTPUT_FOLDER_PATH \
        + populate_output_file_label(config_L3)
    with open(output_file, 'w+') as csv_file:
      csv_manager = CsvManager(csv_file, inst_result.keys())
      csv_manager.write_row(inst_result)
      csv_manager.write_row(data_result)
      csv_manager.write_row(L2_result)
      csv_manager.write_row(L3_result)
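
Example #3 expands configs/project.json into every combination of L3 parameters through cartesian_dict_product, which is not defined on this page. A minimal sketch, assuming the JSON maps each parameter name (K, N, the prefetcher types, REPLACEMENT) to a list of candidate values, could be:

import itertools


def cartesian_dict_product(options):
  # Hypothetical sketch: 'options' maps each parameter name to a list of
  # candidate values, e.g. {'K': [4, 8], 'N': [256, 512]}.
  # Yields one dict per combination, which is what the list comprehension
  # above iterates over to build the per-run L3 configurations.
  keys = list(options.keys())
  for values in itertools.product(*(options[key] for key in keys)):
    yield dict(zip(keys, values))

For {'K': [4, 8], 'N': [512]} this yields {'K': 4, 'N': 512} and {'K': 8, 'N': 512}; check_raw_config (also project-specific) then filters the combinations by their C/L/K/N values before the simulation loop runs.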