def initial_conditions(basic_prm, city_data, min_days, Julia, correction=1.0):
    """Fit the data and define the initial conditions of the SEIR model.

    Parameters
    ----------
    basic_prm : mapping with the model parameters (uses "tinf", "tinc", "rep").
    city_data : DataFrame with at least "estimated_population_2019",
        "confirmed" and "city" columns for a single city.
    min_days : minimum number of usable days required to perform the fit.
    Julia : PyJulia Main module; `fit_initial` must already be defined on the
        Julia side (loaded elsewhere — presumably from robot_dance.jl).
    correction : multiplicative correction applied to the observed series.

    Returns
    -------
    ((S1, E1, I1, R1, ndays), observed_I, population)

    Raises
    ------
    ValueError
        If fewer than `min_days` days of data are available (or all zeros).
    """
    population = city_data["estimated_population_2019"].iloc[0]
    confirmed = city_data["confirmed"]
    # Compute the new cases from the confirmed sum
    new_cases = confirmed.values[1:] - confirmed.values[:-1]
    # Use a mean in a week to smooth the data (specially to deal with weekends)
    observed_I = np.convolve(new_cases, np.ones(7, dtype=int), 'valid') / 7.0
    # Now accumulate in the inf_window
    inf_window = int(round(basic_prm["tinf"]))
    observed_I = np.convolve(observed_I, np.ones(inf_window, dtype=int), 'valid')
    ndays = len(observed_I)
    if ndays >= min_days and sum(observed_I) > 0:
        # Work with fractions of the population, as the Julia model expects.
        observed_I /= population
        Julia.observed_I = correction*observed_I
        Julia.tinc = basic_prm["tinc"]
        Julia.tinf = basic_prm["tinf"]
        Julia.rep = basic_prm["rep"]
        Julia.eval('initialc = fit_initial(tinc, tinf, rep, observed_I)')
        S1 = Julia.initialc[0]
        E1 = Julia.initialc[1]
        I1 = Julia.initialc[2]
        R1 = Julia.initialc[3]
        return (S1, E1, I1, R1, ndays), observed_I, population
    else:
        raise ValueError("Not enough data for %s only %d days available" %
                         (city_data["city"].iloc[0], len(observed_I)))
def initialize_worker(params, proc_num):
    """Initialize a worker process: seed its RNG and start a Julia session.

    Stores the worker index in the module-global `my_number` and the Julia
    session in the module-global `jl`, then loads Circuitscape and calls
    `load_data` (defined elsewhere in this module).

    Parameters
    ----------
    params : object with `seed`, `sys_image`, `julia` and `compiled_modules`
        attributes used to configure the Julia bridge.
    proc_num : index of this worker process.

    Returns
    -------
    The per-worker random seed that was used (int).
    """
    global my_number
    my_number = proc_num
    # make new random seed, as seed + process_number, so each worker draws a
    # different but reproducible stream
    local_seed = int(params.seed) + my_number
    random.seed(local_seed)
    global jl
    from julia.api import Julia
    from julia import Base, Main
    from julia.Main import println, redirect_stdout
    # establish connection to julia
    print("Worker", proc_num, "connecting to Julia...\n")
    if params.sys_image is not None:
        # A custom system image makes startup much faster.
        jl = Julia(init_julia=False, sysimage=params.sys_image,
                   julia=params.julia,
                   compiled_modules=params.compiled_modules)
    else:
        jl = Julia(init_julia=False, julia=params.julia,
                   compiled_modules=params.compiled_modules)
    if my_number == 0:
        # Only worker 0 reports progress to avoid interleaved output.
        print("Loading Circuitscape in Julia...\n")
    #jl.eval("using Pkg;")
    jl.eval("using Circuitscape; using Suppressor;")
    #Main.eval("stdout")
    load_data(params, my_number)
    return(local_seed)
def import_julia_and_robot_dance():
    """Start the PyJulia bridge and load the robot_dance.jl module.

    NOTE(review): `compiled_modules=False` is slower to start but avoids the
    PyJulia precompilation issues documented in its troubleshooting guide.
    The function works purely through side effects (imports and the Julia
    `include`); it returns nothing.
    """
    # To use PyJulia
    print('Loading Julia library...')
    from julia.api import Julia
    jl = Julia(compiled_modules=False)
    # Rebind Julia's Main module under the name `Julia`, matching how the
    # rest of this file accesses it.
    from julia import Main as Julia
    print('Loading Julia library... Ok!')
    print('Loading Robot-dance Julia module...')
    Julia.eval('include("robot_dance.jl")')
    print('Loading Robot-dance Julia module... Ok!')
def prepare_optimization(large_cities, population, initial_values, M, out, target,
                         window=14, ndays=400, min_level=1.0, hammer_duration=14,
                         hammer_level=0.89, force_dif=1):
    """Copy the problem data to Julia and build the control model `m`.

    Parameters
    ----------
    large_cities : sequence of city names (index into `initial_values`).
    population : Series with city populations.
    initial_values : DataFrame with "S0", "E0", "I0", "R0" columns per city.
    M, out : mobility matrix and out-flow vector (DataFrames/Series).
    target : infection-level target passed to the Julia model.
    window : days each control value is held; 1 selects the daily model.
    ndays, min_level, hammer_duration, hammer_level : model parameters
        forwarded verbatim to Julia.
    force_dif : either the scalar 1 (meaning "no restriction": an all-ones
        matrix is built) or a (ncities, ndays) array.

    Side effects: sets globals in the Julia session and defines `prm` and `m`.
    """
    # Infected upper bound, it is larger in São Paulo.
    ncities = len(large_cities)
    # Bug fix: the original tested `force_dif is 1`, an identity comparison
    # with an int literal that relies on CPython interning and emits a
    # SyntaxWarning on Python >= 3.8.  Compare by value instead, guarding
    # with isscalar so an array-valued force_dif is not compared elementwise.
    if np.isscalar(force_dif) and force_dif == 1:
        force_dif = np.ones((ncities, ndays))
    Julia.s1 = initial_values.loc[large_cities, "S0"].values
    Julia.e1 = initial_values.loc[large_cities, "E0"].values
    Julia.i1 = initial_values.loc[large_cities, "I0"].values
    Julia.r1 = initial_values.loc[large_cities, "R0"].values
    Julia.out = out.values
    # Copy so Julia does not alias the caller's arrays.
    Julia.M = M.values.copy()
    Julia.population = population.values.copy()
    Julia.ndays = ndays
    Julia.target = target
    Julia.hammer_duration = hammer_duration
    Julia.hammer_level = hammer_level
    Julia.min_level = min_level
    Julia.force_dif = force_dif
    if window == 1:
        Julia.eval("""
            prm = SEIR_Parameters(ndays, s1, e1, i1, r1, out, sparse(M), sparse(M'))
            m = control_multcities(prm, population, target, force_dif,
                hammer_duration, hammer_level, min_level)
        """)
    else:
        Julia.window = window
        Julia.eval("""
            prm = SEIR_Parameters(ndays, s1, e1, i1, r1, out, sparse(M), sparse(M'))
            m = window_control_multcities(prm, population, target, window,
                force_dif, hammer_duration, hammer_level, min_level);
        """)
def save_result(cities_names, filename):
    """Save the result of a run for further processing.

    Pulls the optimal SEIR trajectories and the expanded control `rt` out of
    the Julia model `m`, assembles one row per (city, variable) pair and
    writes the table to `filename` as CSV.
    """
    Julia.eval(
        "s = value.(m[:s]); e = value.(m[:e]); i = value.(m[:i]); r = value.(m[:r])"
    )
    Julia.eval("rt = expand(value.(m[:rt]), prm)")
    rows = []
    for idx, city in enumerate(cities_names):
        # One row per state variable, each row: [city, variable, day 0, day 1, ...]
        for var in ("s", "e", "i", "r", "rt"):
            rows.append([city, var] + list(getattr(Julia, var)[idx, :]))
    n_days = len(Julia.s[0, :])
    table = pd.DataFrame(rows,
                         columns=["City", "Variable"] + list(range(n_days)))
    table.set_index(["City", "Variable"], inplace=True)
    return table.to_csv(filename)
def prepare_optimization(basic_prm, cities_data, mob_matrix, target, force_dif=1):
    """Copy the problem data to Julia and build the control model `m`.

    Parameters
    ----------
    basic_prm : mapping with model parameters ("tinc", "tinf", "rep", "ndays",
        "hammer_duration", "hammer_level", "min_level", "window").
    cities_data : DataFrame with "S1", "E1", "I1", "R1", "population" columns.
    mob_matrix : DataFrame whose last column is "out" and the others the
        mobility matrix.
    target : DataFrame/Series with the infection-level target.
    force_dif : either the scalar 1 (build an all-ones matrix) or a
        (ncities, ndays) array.

    Side effects: sets globals in the Julia session and defines `prm` and `m`.
    """
    ncities, ndays = len(cities_data.index), int(basic_prm["ndays"])
    # Bug fix: `force_dif is 1` was an identity test against an int literal
    # (interning-dependent, SyntaxWarning on Python >= 3.8).  Compare by
    # value, guarding against an array-valued force_dif.
    if np.isscalar(force_dif) and force_dif == 1:
        force_dif = np.ones((ncities, ndays))
    Julia.tinc = basic_prm["tinc"]
    Julia.tinf = basic_prm["tinf"]
    Julia.rep = basic_prm["rep"]
    Julia.s1 = cities_data["S1"].values
    Julia.e1 = cities_data["E1"].values
    Julia.i1 = cities_data["I1"].values
    Julia.r1 = cities_data["R1"].values
    Julia.population = cities_data["population"].values
    Julia.out = mob_matrix["out"].values
    # All columns but the last ("out") form the mobility matrix.
    Julia.M = mob_matrix.values[:, :-1]
    Julia.ndays = ndays
    Julia.target = target.values
    Julia.hammer_duration = int(basic_prm["hammer_duration"])
    Julia.hammer_level = basic_prm["hammer_level"]
    Julia.min_level = basic_prm["min_level"]
    Julia.force_dif = force_dif
    if basic_prm["window"] == 1:
        Julia.eval("""
            prm = SEIR_Parameters(tinc, tinf, rep, ndays, s1, e1, i1, r1, 1,
                out, sparse(M), sparse(M'))
            m = control_multcities(prm, population, target, force_dif,
                hammer_duration, hammer_level, min_level)
        """)
    else:
        Julia.window = basic_prm["window"]
        Julia.eval("""
            prm = SEIR_Parameters(tinc, tinf, rep, ndays, s1, e1, i1, r1,
                window, out, sparse(M), sparse(M'))
            m = window_control_multcities(prm, population, target, force_dif,
                hammer_duration, hammer_level, min_level);
        """)
def optimize_and_show_results(i_fig, rt_fig, data_file, large_cities):
    """Optimize and save figures and data for further processing.

    Parameters
    ----------
    i_fig : path of the infection-level figure to write.
    rt_fig : path of the reproduction-rate figure to write.
    data_file : CSV path forwarded to `save_result`.
    large_cities : sequence of city names, in the same order used to build
        the model.
    """
    # Solve the model defined by prepare_optimization and pull the solution.
    Julia.eval("""
        optimize!(m)
        rt = value.(m[:rt]); i = value.(m[:i])
    """)
    for i in range(len(large_cities)):
        plt.plot(Julia.rt[i, :], label=large_cities[i], lw=5, alpha=0.5)
    plt.legend()
    plt.title("Target reproduction rate")
    plt.savefig(rt_fig)
    plt.clf()
    for i in range(len(large_cities)):
        plt.plot(Julia.i[i, :], label=large_cities[i])
    plt.legend()
    plt.title("Infection level")
    plt.savefig(i_fig)
    save_result(large_cities, data_file)
def initial_conditions(city, covid_data, covid_window, min_days, Julia, correction=1.0): """Fits data and define initial contidions of the SEIR model. """ # Gets the city data city_data = covid_data[covid_data["city"] == city].copy() city_data.reset_index(inplace=True) city_data.sort_values(by=["date"], inplace=True) population = city_data["estimated_population_2019"].iloc[0] confirmed = city_data["confirmed"] # I am computing the new cases instead of using the new_confirmed column because # there is error at least in the first element for São Paulo. It should be 1. new_cases = confirmed.values[1:] - confirmed.values[:-1] new_cases = np.append(confirmed[0], new_cases) city_data["new_cases"] = new_cases observed_I = city_data["new_cases"].rolling(covid_window).sum() observed_I[:covid_window] = confirmed[:covid_window] ndays = len(observed_I) if ndays >= min_days: observed_I /= population Julia.observed_I = correction * observed_I.values Julia.eval('initialc = fit_initial(observed_I)') S0 = Julia.initialc[0] E0 = Julia.initialc[1] I0 = Julia.initialc[2] R0 = Julia.initialc[3] return (S0, E0, I0, R0, ndays), observed_I else: raise ValueError("Not enough data for %s only %d days available" % (city, len(observed_I)))
def prepare_optimization(basic_prm, cities_data, mob_matrix, target, hammer_data,
                         force_dif=1, verbosity=0):
    """Copy the problem data to Julia and build the control model `m`.

    Parameters
    ----------
    basic_prm : mapping with model parameters ("tinc", "tinf", "rep", "ndays",
        "min_level", "window", optionally "delta_rt_max").
    cities_data : DataFrame with "S1", "E1", "I1", "R1", "population" columns.
    mob_matrix : DataFrame whose last column is "out" and the others the
        mobility matrix.
    target : DataFrame/Series with the infection-level target.
    hammer_data : DataFrame with per-city "duration" and "level" columns.
    force_dif : either the scalar 1 (build an all-ones matrix) or a
        (ncities, ndays) array.
    verbosity : verbosity level forwarded to the Julia solver.

    Side effects: sets globals in the Julia session and defines `prm` and `m`.
    """
    ncities, ndays = len(cities_data.index), int(basic_prm["ndays"])
    # Bug fix: `force_dif is 1` was an identity test against an int literal
    # (interning-dependent, SyntaxWarning on Python >= 3.8).  Compare by
    # value, guarding against an array-valued force_dif.
    if np.isscalar(force_dif) and force_dif == 1:
        force_dif = np.ones((ncities, ndays))
    Julia.tinc = basic_prm["tinc"]
    Julia.tinf = basic_prm["tinf"]
    Julia.rep = basic_prm["rep"]
    Julia.s1 = cities_data["S1"].values
    Julia.e1 = cities_data["E1"].values
    Julia.i1 = cities_data["I1"].values
    Julia.r1 = cities_data["R1"].values
    Julia.population = cities_data["population"].values
    Julia.out = mob_matrix["out"].values
    # All columns but the last ("out") form the mobility matrix.
    Julia.M = mob_matrix.values[:, :-1]
    Julia.ndays = ndays
    Julia.target = target.values
    Julia.min_level = basic_prm["min_level"]
    Julia.force_dif = force_dif
    Julia.hammer_duration = hammer_data["duration"].values
    Julia.hammer_level = hammer_data["level"].values
    Julia.verbosity = verbosity
    if basic_prm["window"] == 1:
        Julia.eval("""
            prm = SEIR_Parameters(tinc, tinf, rep, ndays, s1, e1, i1, r1, 1,
                out, sparse(M), sparse(M'))
            m = control_multcities(prm, population, target, force_dif,
                hammer_duration, hammer_level, min_level, verbosity)
        """)
    else:
        Julia.window = basic_prm["window"]
        Julia.eval("""
            prm = SEIR_Parameters(tinc, tinf, rep, ndays, s1, e1, i1, r1,
                window, out, sparse(M), sparse(M'))
            m = window_control_multcities(prm, population, target, force_dif,
                hammer_duration, hammer_level, min_level, verbosity);
        """)
    # Check if there is a ramp parameter (delta_rt_max)
    # If so, add ramp constraints to the model
    if 'delta_rt_max' in basic_prm:
        Julia.delta_rt_max = basic_prm["delta_rt_max"]
        Julia.verbosity = verbosity
        Julia.eval("""
            m = add_ramp(m, prm, hammer_duration, delta_rt_max, verbosity)
        """)
def save_result(basic_prm, cities_data, target, filename):
    """Save the result of a run for further processing.

    Extracts the optimal trajectories, the control `rt` and the testing
    policy from the Julia model `m`, adds ICU capacity/usage series per city
    and writes everything as a (City, Variable)-indexed CSV.

    Parameters
    ----------
    basic_prm : mapping with model parameters (uses "tinf").
    cities_data : DataFrame indexed by city with "population", "icu_capacity",
        "time_icu", "confidence" columns and the ICU time series in
        columns 7:-2 (positional) — TODO confirm the slice against the
        data-preparation code.
    target : DataFrame with per-city ICU targets.
    filename : output CSV path.

    Returns
    -------
    The assembled DataFrame (also written to `filename`).
    """
    cities_names = cities_data.index
    n_cities = len(cities_names)
    Julia.eval(
        "s = value.(m[:s]); e = value.(m[:e]); i = value.(m[:i]); r = value.(m[:r])"
    )
    Julia.eval("rt = expand(value.(m[:rt]), prm)")
    n = len(Julia.s[0, :])
    Julia.eval("test = value.(m[:test])")
    df = []
    for i in range(n_cities):
        c = cities_names[i]
        df.append([c, "s"] + list(Julia.s[i, :]))
        df.append([c, "e"] + list(Julia.e[i, :]))
        df.append([c, "i"] + list(Julia.i[i, :]))
        df.append([c, "r"] + list(Julia.r[i, :]))
        df.append([c, "rt"] + list(Julia.rt[i, :]))
        # "rel. test" is the per-capita testing level; "test" scales it by
        # the city population to get absolute numbers.
        df.append([c, "rel. test"] + list(Julia.test[i, :]))
        df.append([c, "test"] +
                  list(Julia.test[i, :] * cities_data.loc[c, "population"]))
        # Information on ICU
        icu_capacity = cities_data.loc[c, "population"] * cities_data.loc[
            c, "icu_capacity"]
        df.append([c, "icu_capacity"] + list(icu_capacity * np.ones(n)))
        icu_target = icu_capacity * target.loc[c, :]
        df.append([c, "target_icu"] + list(icu_target))
        # Mean and upper confidence bound for the ICU-demand ratio time series.
        rho_icu = SimpleTimeSeries(*cities_data.iloc[i, 7:-2])
        confidence = cities_data.loc[c, "confidence"]
        mean_icu, upper_icu = rho_icu.get_upper_bound(n, confidence)
        df.append([c, "mean_rho_icu"] + list(mean_icu))
        df.append([c, "upper_rho_icu"] + list(upper_icu))
        # Expected / worst-case ICU occupation implied by the infection level.
        mean_icu = cities_data.loc[c, "time_icu"] / basic_prm[
            "tinf"] * mean_icu * cities_data.loc[c, "population"] * Julia.i[i, :]
        df.append([c, "mean_used_icu"] + list(mean_icu))
        upper_icu = cities_data.loc[c, "time_icu"] / basic_prm[
            "tinf"] * upper_icu * cities_data.loc[c, "population"] * Julia.i[i, :]
        df.append([c, "upper_used_icu"] + list(upper_icu))
    df = pd.DataFrame(df,
                      columns=["City", "Variable"] + list(range(len(Julia.s[0, :]))))
    df.set_index(["City", "Variable"], inplace=True)
    df.to_csv(filename)
    return df
# Demo: pass a NumPy float32 array to a Julia function defined in fn.jl and
# print it before and after the call (presumably to show whether fn mutates
# the array in place — TODO confirm against fn.jl).
from julia.api import Julia
from julia import Main

jl = Julia(compiled_modules=False)
jl.eval('include("fn.jl")')

import numpy

# Bug fix: the original wrote `dtype=np.float32`, but only `numpy` is
# imported here (there is no `np` alias), which raises NameError at runtime.
x = numpy.array([[1, 2, 3], [4, 5, 6]], dtype=numpy.float32)
print(x)

res = Main.fn(x)

print(x)
"Install julia using 'pip install julia==0.5.6'" "Available for Python>=3.4 and Python <= 3.8") logger.error(error_message) raise DrumCommonException(error_message) ## need better invocation here try: jl = Julia(sysimage=JL_SYSIMAGE_PATH, init_julia=JL_INIT) except Exception as error_message: logger.error(error_message) jl = Julia(init_julia=JL_INIT) logger.info("Julia ready!") from julia import Base logger.info(f"julia was started with {Base.julia_cmd()}") jl.eval(f'using Pkg; Pkg.activate("{JL_PROJECT}"); Pkg.instantiate()') class JlPredictor(BaseLanguagePredictor): def __init__(self, ): super(JlPredictor, self).__init__() def configure(self, params): super(JlPredictor, self).configure(params) logger.info(f"loading {JL_SCORE_PATH}") jl.eval(f'include("{JL_SCORE_PATH}")') logger.info(f"{JL_SCORE_PATH} loaded") from julia import Main Main.init(self._code_dir, self._target_type.value) self._model = Main.load_serialized_model(self._code_dir)
def optimize_and_show_results(i_fig, rt_fig, data_file, large_cities, verbosity=0):
    """Optimize and save figures and data for further processing.

    Solves the Julia model `m`, optionally prints per-city statistics about
    the optimal control (number of rt changes, average infection, open days,
    lockdown days), then plots rt and infection levels and calls
    `save_result`.

    Parameters
    ----------
    i_fig, rt_fig : figure output paths (assumed to share a directory).
    data_file : CSV path forwarded to `save_result`.
    large_cities : sequence of city names, model order.
    verbosity : >= 1 prints progress and the statistics report.
    """
    if verbosity >= 1:
        print('Solving Robot-dance...')
    Julia.eval("""
        optimize!(m)
        pre_rt = value.(m[:rt]); i = value.(m[:i])
        rt = expand(pre_rt, prm)
    """)
    if verbosity >= 1:
        print('Solving Robot-dance... Ok!')
    if verbosity >= 1:
        print('')
        print('-----')
        print('Number of rt changes in each city')
        for (i, c) in enumerate(large_cities):
            # A change is a nonzero first difference; +1 counts the initial level.
            changes_rt = len(np.diff(Julia.rt[i]).nonzero()[0]) + 1
            print(f'{c}: {changes_rt}')
        print('-----')
        print('')
        print('-----')
        print('Average fraction of infected')
        for (i, c) in enumerate(large_cities):
            i_avg = sum(Julia.i[i]) / len(Julia.i[i])
            print(f'{c}: {i_avg}')
        print('-----')
        print('')
        print('-----')
        print('Number of days open (rt = 2.5)')
        for (i, c) in enumerate(large_cities):
            rt = Julia.rt[i]
            # NOTE(review): the threshold 2.4 is slightly below the 2.5 in the
            # header, presumably to absorb solver tolerance — confirm.
            inds = np.nonzero(rt >= 2.4)[0]
            count_open_total = len(inds)
            # Split the open days into maximal consecutive runs.
            thresh_open = np.nonzero(np.diff(inds) > 1)[0] + 1
            thresh_open = np.insert(thresh_open, 0, 0)
            thresh_open = np.append(thresh_open, len(inds))
            count_open = np.diff(thresh_open)
            print(f'{c}: {count_open_total} days total')
            for (i, n) in enumerate(count_open):
                print(f'Opening {i+1}: {n} days')
            print(f'Average: {np.mean(count_open):.0f} days')
            print('')
        print('-----')
        print('')
        print('-----')
        print('Number of days in lockdown (rt <= 1.1)')
        for (i, c) in enumerate(large_cities):
            rt = Julia.rt[i]
            inds = np.nonzero(rt <= 1.1)[0]
            count_open_total = len(inds)
            thresh_open = np.nonzero(np.diff(inds) > 1)[0] + 1
            thresh_open = np.insert(thresh_open, 0, 0)
            thresh_open = np.append(thresh_open, len(inds))
            count_open = np.diff(thresh_open)
            print(f'{c}: {count_open_total} days total')
            for (i, n) in enumerate(count_open):
                print(f'Lockdown {i+1}: {n} days')
            print(f'Average: {np.mean(count_open):.0f} days')
            print('')
        print('-----')
    # Before saving anything, check if directory exists
    # Lets assume all output files are in the same directory
    dir_output = path.split(i_fig)[0]
    if not path.exists(dir_output):
        os.makedirs(dir_output)
    if verbosity >= 1:
        print('Plotting charts...')
    for i in range(len(large_cities)):
        plt.plot(Julia.rt[i, :], label=large_cities[i], lw=5, alpha=0.5)
    plt.legend()
    plt.title("Target reproduction rate")
    plt.savefig(rt_fig)
    plt.clf()
    for i in range(len(large_cities)):
        plt.plot(Julia.i[i, :], label=large_cities[i])
    plt.legend()
    plt.title("Infection level")
    plt.savefig(i_fig)
    if verbosity >= 1:
        print('Plotting charts... Ok!')
    if verbosity >= 1:
        print('Saving output files...')
    save_result(large_cities, data_file)
    if verbosity >= 1:
        print('Saving output files... Ok!')
def check_error_optim(basic_prm, cities_data, mob_matrix, dir_output, verbosity=0):
    """Checks error between optimization and simulation.

    Re-simulates the SEIR dynamics with scipy's solve_ivp using the optimal
    control `rt` taken from the Julia model `m`, plots the two trajectories
    per city, and writes a table with 1-norm and inf-norm discretization
    errors to `dir_output/error_discretization.csv`.

    Parameters
    ----------
    basic_prm : mapping with model parameters ("tinc", "tinf", "ndays").
    cities_data : DataFrame indexed by city with "S1", "E1", "I1", "R1".
    mob_matrix : DataFrame whose last column is "out".
    dir_output : directory where figures and the CSV are written.
    verbosity : >= 1 prints progress messages.
    """
    ncities, ndays = len(cities_data.index), int(basic_prm["ndays"])
    M = mob_matrix.values[:, :-1]
    out = mob_matrix["out"].values
    tspan = (1, ndays)
    teval = range(1, ndays + 1)
    # Initial state stacked as [S; E; I; R] (ncities entries each).
    y0 = cities_data["S1"].values
    y0 = np.append(y0, cities_data["E1"].values)
    y0 = np.append(y0, cities_data["I1"].values)
    y0 = np.append(y0, cities_data["R1"].values)
    Julia.eval(
        "s = value.(m[:s]); e = value.(m[:e]); i = value.(m[:i]); r = value.(m[:r])"
    )
    Julia.eval("rt = expand(value.(m[:rt]), prm)")
    t_in = teval
    rt_in = Julia.rt
    if verbosity >= 1:
        print('Simulating robot-dance control...')
    sol = solve_ivp(_robot_dance_simul, tspan, y0, t_eval=teval,
                    args=(basic_prm["tinc"], \
                          basic_prm["tinf"], \
                          ncities, \
                          M, \
                          out, \
                          t_in, \
                          rt_in))
    if verbosity >= 1:
        print('Simulating robot-dance control... Ok!')
    # Unstack the simulated trajectories in the same [S; E; I; R] order.
    s_sim = sol.y[:ncities]
    e_sim = sol.y[ncities:2 * ncities]
    i_sim = sol.y[2 * ncities:3 * ncities]
    r_sim = sol.y[3 * ncities:]
    if verbosity >= 1:
        print('Plotting errors...')
    for (i, c) in enumerate(cities_data.index):
        fig = plt.figure()
        plt.plot(Julia.s[i], label="robot-dance")
        plt.plot(s_sim[i], label="simulation")
        plt.legend()
        plt.title(f'{c}, Susceptible')
        plt.savefig(f'{dir_output}/{c}_s.png')
        fig = plt.figure()
        plt.plot(Julia.e[i], label="robot-dance")
        plt.plot(e_sim[i], label="simulation")
        plt.legend()
        plt.title(f'{c}, Exposed')
        plt.savefig(f'{dir_output}/{c}_e.png')
        fig = plt.figure()
        plt.plot(Julia.i[i], label="robot-dance")
        plt.plot(i_sim[i], label="simulation")
        plt.legend()
        plt.title(f'{c}, Infected')
        plt.savefig(f'{dir_output}/{c}_i.png')
        fig = plt.figure()
        plt.plot(Julia.r[i], label="robot-dance")
        plt.plot(r_sim[i], label="simulation")
        plt.legend()
        plt.title(f'{c}, Removed')
        plt.savefig(f'{dir_output}/{c}_r.png')
    if verbosity >= 1:
        print('Plotting errors... Ok!')
    fig = plt.figure()
    for (i, c) in enumerate(cities_data.index):
        plt.plot(rt_in[i], label=c)
    plt.legend()
    plt.grid()
    plt.title('Control rt')
    plt.savefig(f'{dir_output}/rt.png')
    rt_diff = []
    for (i, c) in enumerate(cities_data.index):
        rt_diff.append(np.diff(rt_in[i]))
    fig = plt.figure()
    for (i, c) in enumerate(cities_data.index):
        plt.plot(rt_diff[i], label=c)
    plt.legend()
    plt.grid()
    plt.title('Diff rt')
    plt.savefig(f'{dir_output}/diff_rt.png')
    plt.show()
    if verbosity >= 1:
        print('Saving errors table...')
    df = pd.DataFrame(columns=[
        's_norm_1', 'e_norm_1', 'i_norm_1', 'r_norm_1', 's_norm_inf',
        'e_norm_inf', 'i_norm_inf', 'r_norm_inf'
    ], index=cities_data.index)
    for (i, c) in enumerate(cities_data.index):
        df.loc[c, 's_norm_1'] = np.linalg.norm(s_sim[i] - Julia.s[i], ord=1)
        df.loc[c, 'e_norm_1'] = np.linalg.norm(e_sim[i] - Julia.e[i], ord=1)
        df.loc[c, 'i_norm_1'] = np.linalg.norm(i_sim[i] - Julia.i[i], ord=1)
        df.loc[c, 'r_norm_1'] = np.linalg.norm(r_sim[i] - Julia.r[i], ord=1)
        df.loc[c, 's_norm_inf'] = np.linalg.norm(s_sim[i] - Julia.s[i], ord=np.inf)
        df.loc[c, 'e_norm_inf'] = np.linalg.norm(e_sim[i] - Julia.e[i], ord=np.inf)
        df.loc[c, 'i_norm_inf'] = np.linalg.norm(i_sim[i] - Julia.i[i], ord=np.inf)
        df.loc[c, 'r_norm_inf'] = np.linalg.norm(r_sim[i] - Julia.r[i], ord=np.inf)
    df.to_csv(f'{dir_output}/error_discretization.csv')
    if verbosity >= 1:
        print('Saving errors table... Ok!')
def prepare_optimization(basic_prm, cities_data, mob_matrix, target, hammer_data,
                         force_dif=1, pools=None, verbosity=0, test_budget=0,
                         tests_off=[], tau=3, test_efficacy=0.8, daily_tests=0,
                         proportional_tests=False):
    """Copy the problem data to Julia and build the control model `m`.

    This variant of the model includes ICU data and a testing policy.

    Parameters
    ----------
    basic_prm : mapping with model parameters ("tinc", "tinf", "alternate",
        "rep", "ndays", "time_icu", "min_level", "window", optionally
        "delta_rt_max").
    cities_data : DataFrame with "S1", "E1", "I1", "R1", "population",
        "icu_capacity" columns and the ICU ratio time series in positional
        columns 7:-1 — TODO confirm the slice against the data preparation.
    mob_matrix : DataFrame whose last column is "out".
    target, hammer_data : targets and per-city hammer "duration"/"level".
    force_dif : scalar 1 (build an all-ones matrix) or (ncities, ndays) array.
    pools : optional city pools; defaults to one singleton pool per city.
    test_budget, tests_off, tau, test_efficacy, daily_tests,
    proportional_tests : testing-policy parameters forwarded to Julia.

    Side effects: sets globals in the Julia session and defines `prm` and `m`.
    """
    ncities, ndays = len(cities_data.index), int(basic_prm["ndays"])
    # Bug fix: `force_dif is 1` was an identity test against an int literal
    # (interning-dependent, SyntaxWarning on Python >= 3.8).  Compare by
    # value, guarding against an array-valued force_dif.
    if np.isscalar(force_dif) and force_dif == 1:
        force_dif = np.ones((ncities, ndays))
    # Chage ratios in matrix Mt to be in respect to the origin
    population = cities_data["population"].values
    Mt = mob_matrix.values[:, :-1]
    Mt = (Mt.T).copy()
    for c in range(ncities):
        for k in range(ncities):
            Mt[k, c] *= population[k] / population[c]
    Julia.tinc = basic_prm["tinc"]
    Julia.tinf = basic_prm["tinf"]
    Julia.alternate = basic_prm["alternate"]
    Julia.rep = basic_prm["rep"]
    Julia.s1 = cities_data["S1"].values
    Julia.e1 = cities_data["E1"].values
    Julia.i1 = cities_data["I1"].values
    Julia.r1 = cities_data["R1"].values
    Julia.availICU = cities_data["icu_capacity"]
    Julia.time_icu = basic_prm["time_icu"]
    Julia.rho_icu_ts = cities_data.iloc[:, 7:-1].values
    Julia.population = population
    Julia.out = mob_matrix["out"].values
    Julia.M = mob_matrix.values[:, :-1]
    Julia.Mt = Mt
    Julia.ndays = ndays
    Julia.target = target.values
    Julia.min_level = basic_prm["min_level"]
    Julia.force_dif = force_dif
    Julia.hammer_duration = hammer_data["duration"].values
    Julia.hammer_level = hammer_data["level"].values
    Julia.verbosity = verbosity
    Julia.window = basic_prm["window"]
    Julia.test_budget = test_budget
    Julia.tests_off = tests_off
    Julia.tau = tau
    Julia.test_efficacy = test_efficacy
    Julia.daily_tests = daily_tests
    Julia.proportional_tests = proportional_tests
    if pools is None:
        # Default: every city is its own (singleton) pool.
        Julia.eval("pools = [[c] for c in 1:length(s1)]")
    else:
        Julia.pools = pools
    Julia.eval("""
        prm = SEIR_Parameters(tinc, tinf, rep, ndays, s1, e1, i1, r1, alternate,
            availICU, time_icu, rho_icu_ts, window, out, sparse(M), sparse(Mt))
        m = window_control_multcities(prm, population, target, force_dif,
            hammer_duration, hammer_level, min_level, pools, verbosity,
            test_budget, tests_off, tau, test_efficacy, daily_tests,
            proportional_tests);
    """)
    # Check if there is a ramp parameter (delta_rt_max)
    # If so, add ramp constraints to the model
    if 'delta_rt_max' in basic_prm:
        Julia.delta_rt_max = basic_prm["delta_rt_max"]
        Julia.verbosity = verbosity
        Julia.eval("""
            m = add_ramp(m, prm, hammer_duration, delta_rt_max, verbosity)
        """)
# Smoke test: report the numpy and julia package versions, start the PyJulia
# bridge, and call a Julia function that returns an array.
import numpy
print(numpy.__file__)
print(numpy.__version__)

import julia
print(julia.__file__)
print(julia.__version__)

from julia.api import Julia
print('Initializing Julia (this might take a moment the first time)...')
jl = Julia(compiled_modules=False)
print('Done!')

# Needs to come after the creation of the Julia instance above
from julia import Main

jl.eval('include("return_array.jl")')
res = Main.fn()
# Inspect how PyJulia converts the returned value and its elements.
print(type(res))
print(res)
print('item 0 =', type(res[0]))
import os import os.path as path from optparse import OptionParser import pandas as pd import numpy as np import pylab as plt from pylab import rcParams rcParams['figure.figsize'] = 14, 7 import prepare_data # To use PyJulia from julia.api import Julia jl = Julia(compiled_modules=False) from julia import Main as Julia Julia.eval('ENV["OMP_NUM_THREADS"] = 8') Julia.eval('include("robot_dance.jl")') def get_options(): '''Get options with file locations from command line. ''' parser = OptionParser() parser.add_option( "--basic_parameters", dest="basic_prm", default=path.join("data", "basic_parameters.csv"), help="Basic parameters of the SEIR model [default: %default]") parser.add_option( "--cities_data", dest="cities_data",
scene = bpy.context.scene from julia.api import Julia print('Initializing Julia (this might take a moment the first time)...') # The compiled_modules option is to work around the fact that libpython # is linked statically in Blender. # https://pyjulia.readthedocs.io/en/latest/troubleshooting.html#your-python-interpreter-is-statically-linked-to-libpython jl = Julia(compiled_modules=False) print('Done!') # Needs to come after the creation of the Julia instance above from julia import Main jl.eval('include("catmull-clark.jl")') # Delete previous output mesh if 'subdivided' in bpy.data.objects: bpy.ops.object.select_all(action='DESELECT') bpy.data.objects['subdivided'].select_set(True) bpy.ops.object.delete() # # Get current mesh data # #obj = bpy.data.objects['Cube'] obj = bpy.context.active_object if obj is None:
comm = pympi.COMM_WORLD # Activate the desired julia environment jl.using("Pkg") from julia import Pkg Pkg.activate(".") # Make julia global space available from julia import Main jl.using("MPI") from julia import MPI as jlmpi # Initialize Julia MPI without initializing the libmpi--this is part of the # initialization done by MPI.Init() in MPI.jl jl.eval( 'function init_mpi(); for f in MPI.mpi_init_hooks; f(); end; return; end;') Main.init_mpi() # Convert pympi comm to jlmpi comm Main.handle = pympi._handleof(comm) # make handle accessible to julia jl.eval('comm = MPI.Comm(MPI.MPI_Comm(handle))') # create julia comm # WARNING: You might think that we could use a statement like # Main.comm = jlmpi.Comm(jlmpi.MPI_Comm(pympi._handleof(comm))) # to turn the python MPI comm into a julia MPI comm instead of the above `eval`. # However, this will fail when using MPICH (it works with OpenMPI). The reason # is that MPICH uses integers to differentiate MPI comms (OpenMPI uses raw # pointers) . So for MPICH, `jlmpi.MPI_Comm(pympi._handleof(comm))` returns a # `Cint` (which is a specialized julia Int32 for interfacing with C/Fortran # libraries). When it comes back to python, it is converted to a python `int` # which is then converted to a Julia Int64 when given to `jlmpi.Comm` as an
# Activate the desired julia environment jl.using('Pkg') from julia import Pkg Pkg.activate(".") jl.using('MPI') jl.using('Random') # for seed! function jl.using('Statistics') # for mean function from julia import Main from julia import MPI as jlmpi # Convert pympi comm to jlmpi comm Main.handle = pympi._handleof(comm) # make handle accessible to julia jl.eval('comm = MPI.Comm(MPI.MPI_Comm(handle))') # create julia comm # WARNING: You might think that we could use a statement like # Main.comm = jlmpi.Comm(jlmpi.MPI_Comm(pympi._handleof(comm))) # to turn the python MPI comm into a julia MPI comm instead of the above `eval`. # However, this will fail when using MPICH (it works with OpenMPI). The reason # is that MPICH uses integers to differentiate MPI comms (OpenMPI uses raw # pointers) . So for MPICH, `jlmpi.MPI_Comm(pympi._handleof(comm))` returns a # `Cint` (which is a specialized julia Int32 for interfacing with C/Fortran # libraries). When it comes back to python, it is converted to a python `int` # which is then converted to a Julia Int64 when given to `jlmpi.Comm` as an # argument. The result is a type error. We can avoid this MPICH incompatibility # by using the above `eval` statement. # Initialize Julia MPI without initializing the libmpi--this is part of the # initialization done by MPI.Init() in MPI.jl
def optimize_and_show_results(basic_prm, figure_file, data_file, cities_data,
                              target, verbosity=0):
    """Optimize and save figures and data for further processing.

    Solves the Julia model `m`, computes per-city statistics about the
    optimal control (rt changes, infection levels, open/closed day counts),
    saves the results through `save_result` and plots rt and testing charts.

    Parameters
    ----------
    basic_prm : mapping with model parameters (uses "rep").
    figure_file : base figure path; "-rt" and "-test" variants are derived.
    data_file : CSV path forwarded to `save_result`.
    cities_data : DataFrame indexed by city with "population", "start_date".
    target : per-city target forwarded to `save_result`.
    verbosity : >= 1 prints progress, >= 2 also prints the statistics table.

    Returns
    -------
    DataFrame with the per-city statistics.
    """
    large_cities = cities_data.index
    population = cities_data["population"].values
    if verbosity >= 1:
        print('Solving Robot-dance...')
    Julia.eval("""
        optimize!(m)
        pre_rt = value.(m[:rt]); i = value.(m[:i])
        rt = expand(pre_rt, prm)
        test = value.(m[:test])
    """)
    if verbosity >= 1:
        print('Solving Robot-dance... Ok!')
        print("Total tests used ", end="")
        print((Julia.test.T * population).sum())
    # Bin edges used to classify rt levels: 0, five levels between 1.0 and
    # 0.95*rep, then rep itself.
    bins = [0]
    bins.extend(plt.linspace(1.0, 0.95 * basic_prm["rep"], 5))
    bins.append(basic_prm["rep"])
    stats = pd.DataFrame(index=large_cities)
    changes_rt = []
    for (i, c) in enumerate(large_cities):
        # A change is a nonzero first difference; +1 counts the initial level.
        changes_rt.append(len(np.diff(Julia.rt[i]).nonzero()[0]) + 1)
    stats["Rt changes"] = changes_rt
    i_avg, max_i = [], []
    for (i, c) in enumerate(large_cities):
        # Peak is measured only after the initial hammer period.
        maximum = 100 * Julia.i[i, Julia.hammer_duration[i]:].max()
        average = 100 * sum(Julia.i[i]) / len(Julia.i[i])
        max_i.append(f"{maximum:.3f}%")
        i_avg.append(f"{average:.3f}%")
    stats["Avg. I"] = i_avg
    stats["Max I"] = max_i
    total, mean = [], []
    for (i, c) in enumerate(large_cities):
        rt = Julia.rt[i]
        # "Open" days: rt at or above the second-highest bin edge.
        inds = np.nonzero(rt >= bins[-2])[0]
        count_open_total = len(inds)
        # Split into maximal consecutive runs to get per-opening lengths.
        thresh_open = np.nonzero(np.diff(inds) > 1)[0] + 1
        thresh_open = np.insert(thresh_open, 0, 0)
        thresh_open = np.append(thresh_open, len(inds))
        count_open = np.diff(thresh_open)
        total.append(count_open_total)
        mean.append(np.mean(count_open))
    stats["Open"] = total
    stats["Mean open"] = mean
    total, mean = [], []
    for (i, c) in enumerate(large_cities):
        rt = Julia.rt[i]
        # "Closed" days: rt below the third bin edge.
        inds = np.nonzero(rt < bins[2])[0]
        count_open_total = len(inds)
        thresh_open = np.nonzero(np.diff(inds) > 1)[0] + 1
        thresh_open = np.insert(thresh_open, 0, 0)
        thresh_open = np.append(thresh_open, len(inds))
        count_open = np.diff(thresh_open)
        total.append(count_open_total)
        mean.append(np.mean(count_open))
    stats["Closed"] = total
    stats["Mean closed"] = mean
    if verbosity >= 2:
        print()
        print("Statistics")
        print(stats)
    # Before saving anything, check if directory exists
    # Lets assume all output files are in the same directory
    dir_output = path.split(figure_file)[0]
    if not path.exists(dir_output):
        os.makedirs(dir_output)
    if verbosity >= 1:
        print('Saving output files...')
    result = save_result(basic_prm, cities_data, target, data_file)
    if verbosity >= 1:
        print('Saving output files... Ok!')
    if verbosity >= 1:
        print("Ploting result...")
    # Derive "-rt" and "-test" figure names from the base figure file.
    name, extension = os.path.splitext(figure_file)
    figure_file = name + "-rt" + extension
    plot_result(basic_prm, result, figure_file, Julia.hammer_duration,
                cities_data["start_date"][0], type="rt")
    plt.savefig(figure_file, dpi=150, bbox_inches='tight')
    figure_file = name + "-test" + extension
    plot_result(basic_prm, result, figure_file, Julia.hammer_duration,
                cities_data["start_date"][0], type="test")
    plt.savefig(figure_file, dpi=150, bbox_inches='tight')
    if verbosity >= 1:
        print("Ploting result... OK!")
    return stats
# Demo: call a Julia function (defined in return_array.jl) with an int
# argument and inspect the types PyJulia produces for each returned element.
import time, gc
import numpy
from julia.api import Julia
from julia import Main

jl = Julia(compiled_modules=False)
jl.eval("""
include("return_array.jl")
""")

print('calling into Julia')
res = Main.fn(123)
print('Back in Python')
for v in res:
    print(type(v))
    print(v)
#!/usr/bin/env python3
"""Load calc_PR.jl into a fresh Julia session through PyJulia."""
from julia.api import Julia

JULIA_FILE = 'calc_PR.jl'

jl = Julia(compiled_modules=False)
# Same include string as before, built with an f-string instead of `+`.
jl.eval(f'include("{JULIA_FILE}")')
# Demo: pass a large (200M-element, ~800 MB) float32 array to a Julia
# function and pause before/after the call — presumably so memory usage can
# be observed externally (e.g. with top); TODO confirm intent.
import time, gc
import numpy
from julia.api import Julia
from julia import Main

jl = Julia(compiled_modules=False)
jl.eval("""
include("fn.jl")
import Base.convert
""")

print('allocating')
print(gc.get_stats())
x = numpy.ones(200 * 1024 * 1024, 'float32')
#x = numpy.array([1,2,3,4,5,6], dtype=numpy.float32)
#print(x)
time.sleep(5)
print('calling into Julia')
Main.fn(x)
print('Back in Python')
time.sleep(5)
the Reaction, ReactionSystem and ParticleSystem classes are defined the same way and expose a subset of the Python API. See the file cme.jl for information on the Julia implementation. """ import pdist import numpy as np from julia.api import Julia jul = Julia(compiled_modules=False) with open("cme.jl") as inpf: code = inpf.read() jul.eval(code) class Reaction: def __init__(self, rate, products=None): self.rate = rate self.products = products class GenReaction(Reaction): def __init__(self, rate, products=None): super().__init__(rate, products) def __str__(self): return "GenReaction(rate={}, products={})".format( self.rate, self.products)
from timeit import default_timer as timer import pylab as plt from pylab import rcParams rcParams['figure.figsize'] = 14, 7 print('Loading modules... Ok!') import prepare_data # To use PyJulia print('Loading Julia library...') from julia.api import Julia jl = Julia(compiled_modules=False) from julia import Main as Julia print('Loading Julia library... Ok!') print('Loading Robot-dance Julia module...') Julia.eval('include("robot_dance.jl")') print('Loading Robot-dance Julia module... Ok!') def get_options(): '''Get options with file locations from command line. ''' parser = OptionParser() parser.add_option( "--basic_parameters", dest="basic_prm", default=path.join("data", "basic_parameters.csv"), help="Basic parameters of the SEIR model [default: %default]") parser.add_option( "--cities_data", dest="cities_data",
import json import shutil from multiprocessing import Pool checkmethod = "DINCAE" #checkmethod = "DIVAnd" if len(sys.argv) > 1: checkmethod = sys.argv[1] print("checkmethod ", checkmethod) if checkmethod == "DIVAnd": from julia.api import Julia jl = Julia(compiled_modules=False) jl.eval('push!(LOAD_PATH,joinpath(ENV["HOME"],"projects/Julia/share"))') jl.eval('push!(LOAD_PATH,joinpath(ENV["HOME"],"src","CAE"))') from julia import dincae_insitu epochs = 5000 * 2 epochs = 300 #epochs = 50 #epochs = 5 #epochs = 1 reconstruct_params = { #"epochs": 1, #"epochs": 1_000 * 5 * 2, "epochs": epochs, #"epochs": 5, "batch_size": 12,
dmrgpath = os.path.dirname(os.path.realpath(__file__)) precompile = False # check the system image sysimage = os.environ["HOME"] + "/.julia/sysimages/sys_itensors.so" if not os.path.isfile(sysimage): print("No ITensors system image found, this may take some time") sysimage = None try: # create the executable from julia.api import Julia jlsession = Julia(compiled_modules=False, sysimage=sysimage) # start the Julia session jlsession.eval("using Suppressor") # suppress output except: print("Julia cannot be executed") def run(self): """Execute the Julia program""" import contextlib c = "@suppress_out include(\"" + dmrgpath + "/mpsjulia/mpsjulia.jl\");" self.execute(lambda: jlsession.eval(c)) # evaluate Julia def install(): """Install Julia and ITensor""" julia = "julia" # julia command os.system(julia + " --eval " + "\"import Pkg; Pkg.add(\\\"ITensors\\\")\"")
# Demo: call a Julia function (alter_array.jl) on a NumPy array two ways —
# first passing the array object directly, then passing its raw address and
# length — printing the array after each call to see whether it was mutated.
# NOTE(review): `fn` is presumably defined with methods for both signatures
# (Julia multiple dispatch) — confirm against alter_array.jl.
import numpy, ctypes
import julia
from julia.api import Julia
jl = Julia()
from julia import Main

jl.eval('include("alter_array.jl")')

a = numpy.array([1, 2, 3, 4, 5], 'uint32')
print(a)
# Raw C pointer and element count for the pointer-based call.
addr = a.ctypes.data
length = a.shape[0]

Main.fn(a)
print(a)

Main.fn(addr, length)
print(a)
import mpi4py from mpi4py import MPI as pympi import numpy as np # Activate the desired julia environment jl.using('Pkg') from julia import Pkg Pkg.activate(".") jl.using('MPI') from julia import Main from julia import MPI as jlmpi jl.eval('include("pi_func.jl")') # Initialize jlmpi stuff without initializing libmpi again -- # function definition is in pi_func.jl Main.init_mpi() size = pympi.COMM_WORLD.Get_size() if size > 1: denom = int(np.floor(pympi.COMM_WORLD.Get_size() / 2)) else: denom = 1 color = int(np.floor(pympi.COMM_WORLD.Get_rank() / denom)) key = pympi.COMM_WORLD.Get_rank() % denom comm = pympi.COMM_WORLD.Split(color=color, key=key) ## Convert pympi comm to jlmpi comm ##