Example #1
def case_age_model():
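    # Age-stratified compartments: the kids/normal/risk variants of S, E and I are grouped together below.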
    group = [
        ['S_kids', 'S_normal', 'S_risk'],
        ['E_kids', 'E_normal', 'E_risk'],
        ['I_kids', 'I_normal', 'I_risk'],
        ['I2'],
        ['I3'],
        ['R'],
        ['D']
    ]
    label = {
        "S_kids": "S",
        "E_kids": "E",
        "I_kids": "I_1",
        "I2": "I_2",
        "I3": "I_3"
    }
    data = get_results(
        simulation_age_model, (N, B_KIDS, G_KIDS, P_KIDS, B_NORMAL, G_NORMAL,
                               P_NORMAL, B_NORMAL, G_NORMAL, P_NORMAL), NUM_SIM
    )
    times, avg, std = fill_around_std(
        color_palette, data, 1, states=group, labels=label
    )
    save_data([times, avg, std], (N, NUM_SIM,))
Example #2
def main():

    tasks = (
        simulate_queue(COPD, PROPS, num_servers, seed, MAX_TIME)
        for num_servers, seed in get_params()
    )

    with ProgressBar():
        queues = dask.compute(
            *tasks, scheduler="processes", num_workers=NUM_CORES
        )

    util_dfs, time_dfs = [], []
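    # get_params() yields the same parameter order as the computed queues, so zip pairs each result with its settings.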
    for (num_servers, seed), queue in tqdm.tqdm(zip(get_params(), queues)):
        utilisations, system_times = get_results(
            queue, MAX_TIME, num_servers=num_servers, seed=seed
        )

        util_dfs.append(utilisations)
        time_dfs.append(system_times)

    utilisations = pd.concat(util_dfs)
    system_times = pd.concat(time_dfs)

    utilisations.to_csv(OUT_DIR / "utilisations.csv", index=False)
    system_times.to_csv(OUT_DIR / "system_times.csv", index=False)
Example #3
def run_model(args):
  (model_id, driver_id, Model, get_data, repeat) = args
  testY = [1] * settings.SMALL_CHUNK + [0] * settings.SMALL_CHUNK

  if settings.ENABLE_CACHE:
    predictions = util.get_results(Model, get_data, driver_id, False, repeat)
    if predictions is not False:
      return predictions, testY

  multiplier = 4 if get_data in HEADING_DATA_FUNCTIONS else 1

  trainY = [1] * settings.BIG_CHUNK * multiplier * repeat + \
      [0] * settings.BIG_CHUNK * multiplier * repeat
  trainX, testX = get_data(model_id, driver_id, repeat)

  if type(trainX) in [scipy.sparse.csr.csr_matrix, scipy.sparse.coo.coo_matrix]:
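    # Repeat the positive (first) chunk of rows to line up with trainY; the remaining rows are kept once.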
    trainX = scipy.sparse.vstack(
        [trainX[:settings.BIG_CHUNK * multiplier]] * repeat +
        [trainX[settings.BIG_CHUNK * multiplier:]]
    )
  else:
    trainX = np.vstack((
        np.tile(np.array(trainX[:settings.BIG_CHUNK * multiplier]).T, repeat).T,
        trainX[settings.BIG_CHUNK * multiplier:]
    ))

  assert(trainX.shape[0] == len(trainY))
  assert(testX.shape[0] == len(testY))

  model = Model(trainX, trainY, driver_id)
  predictions = model.predict(testX)

  if settings.ENABLE_CACHE:
    util.cache_results(Model, get_data, driver_id, False, predictions, repeat)

  return predictions, testY
Example #4
def case_no_intervention():
    label = {
        "I1": "I_1",
        "I2": "I_2",
        "I3": "I_3"
    }
    data = get_results(simulation_no_intervention, (N,), NUM_SIM)
    times, avg, std = fill_around_std(color_palette, data, 1, labels=label)
    save_data([times, avg, std], (N, NUM_SIM))
Example #5
def case_average_time():
    t = 5
    output = get_results(
        simulation_max_time, (N, t), NUM_SIM
    )
    timepoints, avg, std = fill_around_std(
        color_palette, output, 1
    )
    save_data([timepoints, avg, std], (N, NUM_SIM, t))
Example #6
def case_average_second_wave():
    data = get_results(simulation_second_wave, tuple(), NUM_SIM)
    labels = {
        "I1": "I_{1}", "I2": "I_{2}", "I3": "I_{3}"
    }
    times, avg, std = fill_around_std(
        color_palette, data, 1, labels=labels
    )
    save_data([times, avg, std], (N, NUM_SIM))
Example #7
def contact_tracing(p):
    time = [5, 7.5, 10]
    data = get_results(simulation_contact_tracing, (N, p, time), NUM_SIM)
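    # Group the quarantined infectious states (Q1-Q3) and the quarantined susceptible/exposed states (QS, QE) alongside the usual compartments.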
    states = [
        ["S"], ["E"], ["I1"], ["I2"], ["I3"], ["R"], ["D"], ["Q1", "Q2", "Q3"],
        ["QS", "QE"]
    ]
    labels = {
        "Q1": "Q_I", "QS": "Q_{S, E}",
        "I1": "I_1", "I2": "I_2", "I3": "I_3"
    }
    times, avg, std = fill_around_std(
        color_palette, data, 1, states=states, labels=labels
    )
    save_data([times, avg, std], (N, NUM_SIM, time, p))
Example #8
def case_average_quarantine_rates():
    q1 = 0.5
    output = get_results(
        simulation_max_quarantine_rates, (N, q1, 0, 0), NUM_SIM
    )
    labels = {
        "I1": "I_1", "I2": "I_2", "I3": "I_3",
        "Q1": "Q", "Q2": "Q_2", "Q3": "Q_3",
    }
    groups = [
        ["Q1", "Q2", "Q3"]
    ]
    complete_group(groups, color_palette)
    timepoints, avg, std = fill_around_std(
        color_palette, output, 1, labels=labels, states=groups
    )
    save_data([timepoints, avg, std], (N, NUM_SIM, q1))
Example #9
def test_model(args):
    (model_id, driver_id, Model, get_data, repeat) = args
    if settings.ENABLE_CACHE:
        predictions = util.get_results(Model, get_data, driver_id, True,
                                       repeat)
        if predictions is not False:
            return driver_id, predictions

    if get_data in HEADING_DATA_FUNCTIONS:
        return test_model_heading(model_id, driver_id, Model, get_data, repeat)

    rides, other_rides = get_data(model_id, driver_id, repeat, test=True)
    trainY = [1] * settings.BIG_CHUNK_TEST * repeat + [
        0
    ] * settings.BIG_CHUNK_TEST * repeat
    kf = KFold(200,
               n_folds=settings.FOLDS,
               shuffle=True,
               random_state=driver_id)
    predictions = ['bug'] * 200
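    # Out-of-fold predictions: each fold's rides are predicted by a model trained on the remaining folds plus other_rides.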
    for train_fold, test_fold in kf:
        trainX = rides[train_fold]
        testX = rides[test_fold]

        if type(trainX) in [
                scipy.sparse.csr.csr_matrix, scipy.sparse.coo.coo_matrix
        ]:
            trainX = scipy.sparse.vstack([trainX] * repeat + [other_rides])
        else:
            trainX = np.vstack((np.tile(np.array(trainX).T,
                                        repeat).T, other_rides))

        assert (trainX.shape[0] == len(trainY))
        assert (testX.shape[0] == settings.SMALL_CHUNK_TEST)

        model = Model(trainX, trainY, driver_id)
        fold_predictions = model.predict(testX)
        for i, v in enumerate(test_fold):
            predictions[v] = fold_predictions[i]

    predictions = np.array(predictions)
    if settings.ENABLE_CACHE:
        util.cache_results(Model, get_data, driver_id, True, predictions,
                           repeat)
    return driver_id, predictions
Example #10
def case_infectious_tracing():
    time = [5, 7.5, 10]
    p = [0.7, 1]
    data = get_results(
        simulation_infectious_tracing, (N, p[0], p[1], time), NUM_SIM
    )
    states = [
        ["S"], ["E"], ["I1"], ["I2"], ["I3"], ["R"], ["D"], ["Q1", "Q2", "Q3"],
        ["QS", "QE"]
    ]
    labels = {
        "Q1": "Q_{I}", "QS": "Q_{S, E}",
        "I1": "I_{1}", "I2": "I_{2}", "I3": "I_{3}"
    }
    times, avg, std = fill_around_std(
        color_palette, data, 1, states=states, labels=labels
    )
    save_data([times, avg, std], (N, NUM_SIM, time, p))
Example #11
def run_model(args):
    (model_id, driver_id, Model, get_data, repeat) = args
    testY = [1] * settings.SMALL_CHUNK + [0] * settings.SMALL_CHUNK

    if settings.ENABLE_CACHE:
        predictions = util.get_results(Model, get_data, driver_id, False,
                                       repeat)
        if predictions is not False:
            return predictions, testY

    multiplier = 4 if get_data in HEADING_DATA_FUNCTIONS else 1

    trainY = [1] * settings.BIG_CHUNK * multiplier * repeat + \
        [0] * settings.BIG_CHUNK * multiplier * repeat
    trainX, testX = get_data(model_id, driver_id, repeat)

    if type(trainX) in [
            scipy.sparse.csr.csr_matrix, scipy.sparse.coo.coo_matrix
    ]:
        trainX = scipy.sparse.vstack(
            [trainX[:settings.BIG_CHUNK * multiplier]] * repeat +
            [trainX[settings.BIG_CHUNK * multiplier:]])
    else:
        trainX = np.vstack((np.tile(
            np.array(trainX[:settings.BIG_CHUNK * multiplier]).T,
            repeat).T, trainX[settings.BIG_CHUNK * multiplier:]))

    assert (trainX.shape[0] == len(trainY))
    assert (testX.shape[0] == len(testY))

    model = Model(trainX, trainY, driver_id)
    predictions = model.predict(testX)

    if settings.ENABLE_CACHE:
        util.cache_results(Model, get_data, driver_id, False, predictions,
                           repeat)

    return predictions, testY
Example #12
def test_model(args):
  (model_id, driver_id, Model, get_data, repeat) = args
  if settings.ENABLE_CACHE:
    predictions = util.get_results(Model, get_data, driver_id, True, repeat)
    if predictions is not False:
      return driver_id, predictions

  if get_data in HEADING_DATA_FUNCTIONS:
    return test_model_heading(model_id, driver_id, Model, get_data, repeat)

  rides, other_rides = get_data(model_id, driver_id, repeat, test=True)
  trainY = [1] * settings.BIG_CHUNK_TEST * repeat + [0] * settings.BIG_CHUNK_TEST * repeat
  kf = KFold(200, n_folds=settings.FOLDS, shuffle=True, random_state=driver_id)
  predictions = ['bug'] * 200
  for train_fold, test_fold in kf:
    trainX = rides[train_fold]
    testX = rides[test_fold]

    if type(trainX) in [scipy.sparse.csr.csr_matrix, scipy.sparse.coo.coo_matrix]:
      trainX = scipy.sparse.vstack([trainX] * repeat + [other_rides])
    else:
      trainX = np.vstack((
          np.tile(np.array(trainX).T, repeat).T,
          other_rides
      ))

    assert(trainX.shape[0] == len(trainY))
    assert(testX.shape[0] == settings.SMALL_CHUNK_TEST)

    model = Model(trainX, trainY, driver_id)
    fold_predictions = model.predict(testX)
    for i, v in enumerate(test_fold):
      predictions[v] = fold_predictions[i]

  predictions = np.array(predictions)
  if settings.ENABLE_CACHE:
    util.cache_results(Model, get_data, driver_id, True, predictions, repeat)
  return driver_id, predictions
Example #13
def main():

    tasks = (simulate_queue(
        COPD,
        PROPS,
        NUM_SERVERS,
        origin,
        destination,
        prop_to_move,
        seed,
        MAX_TIME,
    ) for (origin, destination), prop_to_move, seed in get_params())

    with ProgressBar():
        queues = dask.compute(*tasks,
                              scheduler="processes",
                              num_workers=NUM_CORES)

    util_dfs, time_dfs = [], []
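    # Match each computed queue back to its (origin, destination), proportion-to-move and seed parameters.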
    for ((orgn, dest), move,
         seed), queue in tqdm.tqdm(zip(get_params(), queues)):
        utilisations, system_times = get_results(
            queue,
            MAX_TIME,
            origin=orgn,
            destination=dest,
            prop_to_move=move,
            seed=seed,
        )
        util_dfs.append(utilisations)
        time_dfs.append(system_times)

    utilisations = pd.concat(util_dfs)
    system_times = pd.concat(time_dfs)

    utilisations.to_csv(OUT_DIR / "utilisations.csv", index=False)
    system_times.to_csv(OUT_DIR / "system_times.csv", index=False)
Example #14
File: app.py Project: MaxCodeXTC/examples
import streamlit as st
from util import get_results, render_results

st.title("South Park Search")
st.write("Who said what?")

st.sidebar.title("Search with Jina")
query = st.sidebar.text_input("What do you wish to search?")
top_k = st.sidebar.slider("Top K", min_value=1, max_value=20, value=10)

if st.sidebar.button("Search"):
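    # Query the backend for the top_k matches and render them as markdown.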
    results = get_results(query=query, top_k=top_k)
    st.balloons()
    st.markdown(render_results(results))
Example #15
    # info, modules = create_modules(blocks)

    darknet = DarkNet('yolov3.cfg')
    darknet.load_state_dict(torch.load('yolo-v3.pt'))
    # darknet.load_weights('yolov3.weights')
    darknet.eval()

    classes = load_classes('coco.names')

    img = pre_image('dog-cycle-car.png', 416)
    img = Variable(img)

    detections = darknet(img)

    result = get_results(detections.data, confidence=0.5, nms_conf=0.4)

    result = torch.clamp(result, 0., float(416))
    result = np.array(result)

    ##
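    # Draw the predicted boxes on the original image, resized to the 416x416 network input.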
    origin_image = Image.open('dog-cycle-car.png')
    origin_shape = origin_image.size
    origin_image = origin_image.resize([416, 416])
    draw = ImageDraw.Draw(origin_image)

    for i in range(result.shape[0]):
        res = result[i]
        print(classes[int(res[-1])])
        draw.rectangle((res[1], res[2], res[3], res[4]), outline=(255, 0, 0))