Example #1
    if len(sys.argv) > 4:
      tstart = float(sys.argv[4])
    if len(sys.argv) > 5:
      tend = float(sys.argv[5])
  
  # LOAD FILES
  files1 = io.get_dumps_list(path1)
  files2 = io.get_dumps_list(path2)
  
  if len(files1) == 0 or len(files2) == 0:
      util.warn("INVALID PATH TO DUMP FOLDER")
      sys.exit(1)

  frame_dir = "frames_compare_"+movie_type
  util.make_dir(frame_dir)

  hdr1 = io.load_hdr(files1[0])
  hdr2 = io.load_hdr(files2[0])
  geom1 = io.load_geom(hdr1, path1)
  geom2 = io.load_geom(hdr2, path2)
  # TODO diags from post?
  # Load diagnostics from HARM itself
  diag1 = io.load_log(path1)
  diag2 = io.load_log(path2)

  nthreads = util.calc_nthreads(hdr1)
  if debug:
    for i in range(len(files1)):
      plot(i)
  else:
    util.run_parallel(plot, len(files1), nthreads)
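
The per-frame worker plot(i) is defined earlier in the script and is not part of this excerpt. Below is a minimal sketch of what such a comparison worker could look like; it reuses the globals set up above (files1, hdr1, geom1, frame_dir, and their run-2 counterparts) and assumes the io module offers a load_dump() reader and that dumps expose a density array under 'RHO'. Both of those, and the exact call signature, are assumptions, not the project's confirmed API.

import matplotlib
matplotlib.use('Agg')   # render off-screen so parallel workers need no display
import matplotlib.pyplot as plt
import numpy as np

def plot(i):
    # Assumed reader and field name; the real script's loader may differ
    dump1 = io.load_dump(files1[i], hdr1, geom1)
    dump2 = io.load_dump(files2[i], hdr2, geom2)

    # Side-by-side poloidal log-density slices at the first phi zone
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6))
    ax1.pcolormesh(np.log10(dump1['RHO'][:, :, 0]).T)
    ax2.pcolormesh(np.log10(dump2['RHO'][:, :, 0]).T)
    ax1.set_title("Run 1")
    ax2.set_title("Run 2")

    fig.savefig("{}/frame_{:04d}.png".format(frame_dir, i), dpi=100)
    plt.close(fig)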
Example #2
  
  # LOAD FILES
  files = io.get_dumps_list(path)
  if len(files) == 0:
      util.warn("INVALID PATH TO DUMP FOLDER")
      sys.exit(1)

  frame_dir = "frames_"+movie_type
  util.make_dir(frame_dir)

  hdr = io.load_hdr(files[0])
  geom = io.load_geom(hdr, path)

  jmin, jmax = get_j_vals(geom)
  #print("jmin: {} jmax: {}".format(jmin, jmax))

  if diag_post:
    # Load fluxes from post-analysis: more flexible
    diag = pickle.load(open("eht_out.p", 'rb'))
  else:
    # Load diagnostics from HARM itself
    diag = io.load_log(path)

  nthreads = util.calc_nthreads(hdr, pad=0.3)
  if debug:
    # Run sequentially to make backtraces work
    for i in range(len(files)):
      plot(i)
  else:
    util.run_parallel(plot, len(files), nthreads)
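
util.calc_nthreads and util.run_parallel come from iharm3d's analysis utilities. The standalone sketch below (run_parallel_sketch is a hypothetical name, not the project's function) shows the same fan-out idea with the standard library, assuming plot is a module-level, picklable function.

import multiprocessing

def run_parallel_sketch(function, nmax, nthreads):
    # Map the per-frame function over indices 0..nmax-1 with a pool of workers
    with multiprocessing.Pool(nthreads) as pool:
        pool.map(function, range(nmax))

# Same dispatch as above: run_parallel_sketch(plot, len(files), nthreads)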
Example #3
File: eht_unify.py  Project: rndsrc/iharm3d
                    or key[-4:] == '_phi' or key[-4:] == '_rth'
                    or key[-6:] == '_thphi' or key[-5:] == '_rphi'
                    or key[-4:] == '_pdf'):
                uni[key] += avg[key] * avg['avg_w']
            elif key[-1:] == 't':
                if uni[key].shape[0] < avg[key].shape[0]:
                    uni[key] += avg[key][:uni[key].shape[0]]
                else:
                    uni[key][:avg[key].shape[0]] += avg[key]
            else:
                if uni[key].size < avg[key].size:
                    uni[key] += avg[key][:uni[key].size]
                else:
                    uni[key][:avg[key].size] += avg[key]

# Keys in direct_list are copied straight from the run with the most keys, not averaged
for key in direct_list:
    if key in avgs[avg_max_keys].keys():
        uni[key] = avgs[avg_max_keys][key]

# Compatibility/completeness aliases: lowercase mdot, plus the normalized magnetic flux
uni['mdot'] = uni['Mdot']
uni['phi_b'] = uni['Phi_b'] / np.sqrt(uni['Mdot'])

# Add the log-file versions of the variables, for completeness and better FFTs
if os.path.exists(sys.argv[-1]):
    uni['diags'] = io.load_log(sys.argv[-1])

with open("eht_out.p", "wb") as outf:
    print("Writing eht_out.p")
    pickle.dump(uni, outf)
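
To inspect the unified output, the pickle can simply be loaded back. The time key 't' below is an assumption about typical contents; 'phi_b' is the alias this script creates, but check uni.keys() for what was actually merged.

import pickle

with open("eht_out.p", "rb") as inf:
    uni = pickle.load(inf)

print(sorted(uni.keys()))

# Example: plot the normalized magnetic flux against time, if both keys exist
if 't' in uni and 'phi_b' in uni:
    import matplotlib.pyplot as plt
    plt.plot(uni['t'], uni['phi_b'])
    plt.xlabel("t")
    plt.ylabel("phi_b")
    plt.savefig("phi_b.png")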