# --- Shot / model setup fragment (seismic inversion, Devito-style) ---
# NOTE(review): this section references names not defined anywhere in the
# visible file (m0, spacing, origin, iteration, num_chunks, bucket, data_path,
# data_name, variable_path, variable_name, segy_get, Model) and uses `np`
# before the `import numpy as np` that only appears further down — the file
# looks like a concatenation of several scripts; restore the original
# preamble before running.
shape = m0.shape
ndims = len(spacing)

# Fixed shot index; the commented-out `shot[node_idx]` suggests it was once
# chosen per worker node.
idx = 800  #shot[node_idx]
print("Process shot no.: ", idx)
# Fetch the observed shot record and acquisition geometry for this shot
# from object storage (SEG-Y file named data_name + idx).
dorig, sx, sz, gx, gz, tn, dt, nt = segy_get(bucket, data_path,
                                             data_name + str(idx) + '.segy')

print("Shape: ", shape)

# Load previous iterations
# Iteration 1 starts from a zero update; later iterations re-assemble the
# previous iterate from its per-chunk arrays stored under variable_path.
if iteration == 1:
    x = np.zeros(shape=shape, dtype='float32')
else:
    x = array_get(
        bucket,
        variable_path + 'chunk_1/' + variable_name + str(iteration - 1))
    if num_chunks > 1:
        # Chunks are concatenated along axis 0 in chunk order (chunk_2, ...).
        for chunk in range(1, num_chunks):
            x_chunk = array_get(
                bucket, variable_path + 'chunk_' + str(chunk + 1) + '/' +
                variable_name + str(iteration - 1))
            x = np.concatenate((x, x_chunk), axis=0)
    # Column-major reshape — presumably to match the solver's Fortran-order
    # layout; TODO confirm against the producer of these chunks.
    x = x.reshape(shape[0], shape[1], order='F')

# Set up model structures
# Velocity from squared slowness m0: vp = sqrt(1 / m0).
model = Model(shape=shape, origin=origin, spacing=spacing, vp=np.sqrt(1 / m0))

# Time axis
t0 = 0.
# Stable time step dictated by the model's CFL condition.
dt_comp = model.critical_dt
import numpy as np
import boto3, json, time, datetime, sys
import matplotlib.pyplot as plt
from pytz import timezone
from CloudExtras import array_get

# Read parameters
# Run configuration (e.g. the S3 bucket name read below) from a local JSON file.
with open('parameters.json', 'r') as filename:
    parameters = json.load(filename)

# EC2 bare metal results
# Object-key prefix for the OpenMP strong-scaling timings on bare-metal EC2;
# the thread count and run number are appended per download.
key = 'timings_omp_bare_metal/strong_scaling_omp_bare_metal_numthreads_'
num_cores = np.array([1, 2, 4, 8, 16, 24])  # thread counts swept in the runs
# NOTE(review): num_files (9) exceeds len(num_cores) (6); iterating
# range(num_files) over num_cores would over-index — confirm intended value.
num_files = 9
T = []

# Download each bare-metal strong-scaling timing array from S3 and dump it to
# disk under results_bare_metal/, one file per (core count, run) pair.
# Fix: clamp the loop to len(num_cores) — num_files is 9 but num_cores has
# only 6 entries, so the original range(num_files) raised IndexError at i=6.
for i in range(min(num_files, len(num_cores))):
    for run in range(3):
        T = array_get(parameters['bucket_name'],
                      key + str(num_cores[i]) + '_run_' + str(run))
        T.dump('results_bare_metal/timings_strong_scaling_omp_' +
               str(num_cores[i]) + '_run_' + str(run))

# NOTE(review): removed corrupted paste remnants that followed here
# ("Ejemplo n.º 3", a stray "0", a dangling "break" and "time.sleep(10)" —
# invalid syntax). They look like the tail of a lost retry loop (cf. the
# while/try polling pattern further down) that fetched the AWS Batch job
# description into `array_job`, which the next section reads; restore that
# code from the original script before running.

# Gather timings (unix time stamps)
# Collect the AWS Batch job lifecycle timestamps — creation, start, stop —
# from the job description into a 3-element float array.
print("Get timings")
T = np.zeros(3)
for slot, stamp in enumerate(('createdAt', 'startedAt', 'stoppedAt')):
    T[slot] = array_job[stamp]

# Get Devito runtimes and script runtimes
# Poll S3 until the hybrid-scaling timing container appears (the producing
# job may still be running), then copy its values into a float array.
key = 'timings_hybrid/hybrid_scaling'
bucket = parameters['bucket_name']
while True:
    try:
        tcont = array_get(bucket, key)
        break
    # Fix: was a bare `except:`, which also swallows KeyboardInterrupt and
    # SystemExit, making this infinite poll loop impossible to interrupt.
    except Exception:
        time.sleep(1)

T2 = np.zeros(len(tcont))  # should contain 3 values
for j in range(len(T2)):
    T2[j] = tcont[j]

# Stack the Batch lifecycle stamps (T) and the kernel/script runtimes (T2)
# into one result vector. NOTE(review): the original comment expected
# 4 + 3 values (creation, start, stop, gradient_timestamp, kernel time,
# python_runtime, devito_runtime), but T above holds only 3 — confirm.
TT = np.concatenate((T, T2), axis=0)

# Save results w/ current datetime stamp, spaces stripped so the stamp is
# filename-safe.
timestamp = str(datetime.datetime.now()).replace(' ', '')

# Enumerate every r5 MPI-scaling timing object under the pwitte/timings prefix.
files_r5 = client.list_objects_v2(Bucket=bucket,
                                  Prefix='pwitte/timings/r5/mpi_scaling')
num_timings_r5 = len(files_r5['Contents'])
file_list_r5 = [entry['Key'] for entry in files_r5['Contents']]

# For each instance count, download its matching r5 timing objects and dump
# each to disk, numbering runs in S3 listing order.
timings_r5 = []
for batchsize in num_instances:
    run = 0
    wanted = ('pwitte/timings/r5/mpi_scaling_num_nodes_' + str(batchsize) +
              '_')
    for filename in file_list_r5:
        # Equivalent to the original's explicit slice compare:
        # filename[:41 + len(str(batchsize))] == wanted (41 = len of the
        # literal prefix plus the trailing underscore).
        if filename.startswith(wanted):
            t = array_get(bucket, filename)
            t.dump('results/r5/timings_num_nodes_' + str(batchsize) +
                   '_run_' + str(run))
            run += 1

# Enumerate every c5n MPI-scaling timing object under the same bucket.
files_c5n = client.list_objects_v2(Bucket=bucket,
                                   Prefix='pwitte/timings/c5n/mpi_scaling')
num_timings_c5n = len(files_c5n['Contents'])
file_list_c5n = [entry['Key'] for entry in files_c5n['Contents']]

timings_c5n = []
for batchsize in num_instances:
    run = 0
    for filename in file_list_c5n: