def split_wav_file(input_wav):
    """Split a stereo WAV file into two mono temporary files, one per channel.

    Args:
        input_wav: path (or open file object) of the stereo WAV file to split.

    Returns:
        Tuple ``(path_channel_1, path_channel_2)`` of the two temp-file paths.
    """
    tmp_dir = utils.get_project_root() + "/data/tmp/"
    output_channel1 = tempfile.NamedTemporaryFile(delete=False, dir=tmp_dir)
    output_channel2 = tempfile.NamedTemporaryFile(delete=False, dir=tmp_dir)
    sample_rate, samples = read(input_wav)
    # scipy's read() returns a (n_frames, n_channels) array for stereo audio,
    # so a column slice gives one channel directly.  The original
    # np.array(zip(*samples)[k]) only works on Python 2 (zip() is not
    # subscriptable on Python 3) and copies the whole signal twice.
    write(output_channel1, sample_rate, samples[:, 0])
    write(output_channel2, sample_rate, samples[:, 1])
    # close the handles so data is flushed before callers open the paths
    output_channel1.close()
    output_channel2.close()
    return output_channel1.name, output_channel2.name
def plot_history(self):
    """Plot the recorded amplitude, input and extracted phase of the oscillator.

    The extracted phase (via Hilbert-transform protophase) is overlaid on the
    trace; the figure is saved to img/Van_der_Pol_Oscillator.pdf under the
    project root, shown (blocking), then closed.

    Returns:
        None
    """
    state_array = np.array(self.state_history)
    input_array = np.array(self.input_history)
    t_array = np.array(self.t_range)
    fig = plt.figure(figsize=(20, 10))
    plt.plot(t_array, state_array[:, 0], linewidth=3, color='k', label='amplitude')
    plt.plot(t_array, input_array, linewidth=3, color='r', label='input')
    # protophase from the analytic signal, centred to remove amplitude offset
    s_an = hilbert(state_array[:, 0])
    protophase = np.angle(s_an - np.mean(s_an))
    # np.where(...)[0] is an *array* of all stimulus-onset indices; take the
    # first one explicitly -- int() on a multi-element array raises TypeError.
    # (Matches the [0][0] pattern used in run_prc_extraction_direct.)
    stim_start_ind = int(np.where(np.diff(input_array) > 0)[0][0])
    phase = extract_phase(protophase, stim_start_ind=stim_start_ind)
    plt.plot(t_array, phase % (2 * np.pi), linewidth=1, ls='--', color='g', label='phase')
    plt.legend(fontsize=24)
    plt.xlabel("time, ms", fontsize=24)
    plt.ylabel("Amplitude", fontsize=24)
    plt.grid(True)
    root_folder = get_project_root()
    img_file = os.path.join(root_folder, "img", "Van_der_Pol_Oscillator.pdf")
    plt.savefig(img_file)
    plt.show(block=True)
    plt.close()
    return None
def run_prc_extraction_direct(data_folder, files, save_to, filter=True):
    """Extract phase-response data directly from raw simulation runs.

    For each pickled run the protophase/phase is extracted and the per-trial
    phase shift collected; one period (phase 0..2*pi) of the signal from the
    last processed run is stored alongside the aggregated (phi, delta_phi).

    Args:
        data_folder: directory containing the raw run pickles.
        files: iterable of pickle file names to process.
        save_to: output file name under data/processed_data.
        filter: forwarded to extract_protophase (note: name shadows the
            builtin, kept for caller compatibility).

    Returns:
        None
    """
    data_phi = []
    data_delta_phi = []
    phase = None
    signal = None
    data = None
    for i, fname in tqdm(enumerate(files)):
        with open(os.path.join(data_folder, fname), "rb") as fh:
            data = pickle.load(fh)
        signal = data["signal"]
        t = data["t"]
        inp = data["inp"]
        # first rising / falling edge of the input define the stimulus window
        stim_start_ind = int(np.where(np.diff(inp) > 0)[0][0]) + 1
        stim_duration_ind = (int(np.where(np.diff(inp) < 0)[0][0])
                             - int(np.where(np.diff(inp) > 0)[0][0]))
        try:
            # if extraction errors out we just throw out this data point
            protophase = extract_protophase(t, signal, stim_start_ind,
                                            filter=filter,
                                            psd_peak_width=0.3,
                                            prominence_thr=0.92)
            phase = extract_phase(protophase, stim_start_ind,
                                  n_bins=100, order=30)
            phi, delta_phi = get_phase_shift(t, phase, stim_start_ind,
                                             stim_duration_ind,
                                             transient_inds=500)
            data_phi.append(deepcopy(phi))
            data_delta_phi.append(deepcopy(delta_phi))
        except Exception:
            # deliberately best-effort; narrowed from a bare `except:` so
            # KeyboardInterrupt/SystemExit are no longer swallowed
            pass
    if phase is None:
        # every trial failed: nothing to save (the original code raised a
        # NameError on `phase` here)
        return None
    # get the signal starting at phi = 0, ending up at phi = 2 pi:
    inds_zero_phase = (np.where(np.abs(np.diff(phase % (2 * np.pi))) > np.pi)[0])
    ind1 = inds_zero_phase[0] + 1
    ind2 = inds_zero_phase[1]
    prc_data = dict()
    prc_data['signal'] = signal[ind1:ind2]
    prc_data['dt'] = data['dt']
    prc_data['phase'] = phase[ind1:ind2] % (2 * np.pi)
    prc_data['phi'] = data_phi
    prc_data['delta_phi'] = data_delta_phi
    root_folder = get_project_root()
    out_path = os.path.join(root_folder, "data", "processed_data", save_to)
    with open(out_path, "wb") as fh:
        pickle.dump(prc_data, fh)
    return None
def plot_history(self):
    """Plot the stored amplitude and input traces of the neuron.

    The figure is written to img/Morris_Lecar_Neuron.pdf under the project
    root, shown (blocking) and then closed.

    Returns:
        None
    """
    states = np.array(self.state_history)
    inputs = np.array(self.input_history)
    times = np.array(self.t_range)
    fig = plt.figure(figsize=(20, 10))
    plt.plot(times, states[:, 0], linewidth=3, color='k', label='amplitude')
    plt.plot(times, inputs, linewidth=3, color='r', label='input')
    plt.legend(fontsize=24)
    plt.xlabel("time, ms", fontsize=24)
    plt.ylabel("Amplitude", fontsize=24)
    plt.grid(True)
    # save next to the other model figures under the project's img/ folder
    target = os.path.join(get_project_root(), "img", "Morris_Lecar_Neuron.pdf")
    plt.savefig(target)
    plt.show(block=True)
    plt.close()
    return None
def handle_recording(digits):
    """Persist metadata about a finished Twilio call recording.

    Grabs the recording URL from the request and saves it, together with the
    caller number and an approximate call-start timestamp, as JSON under
    data/raw.  TODO: file download should be done asynchronously.

    Args:
        digits: the dialled digits identifying the callee.

    Returns:
        TwiML response (str) acknowledging the recording.
    """
    recording_url = request.values.get("RecordingUrl", None)
    duration_secs = request.values.get("RecordingDuration", None)
    from_number = request.values.get('From', None)
    # approximate call start = now - recording duration; fall back to "now"
    # when Twilio omits the duration (the original int(None) raised TypeError)
    duration = int(duration_secs) if duration_secs is not None else 0
    timestamp = datetime.datetime.now() - timedelta(seconds=duration)
    timestamp = timestamp.isoformat()
    data = json.dumps({"from": from_number,
                       "to": str(digits),
                       "recording_url": recording_url,
                       "timestamp": timestamp}, ensure_ascii=False)
    # NOTE(review): '|' and the ':' inside ISO timestamps are not portable in
    # file names (breaks on Windows) -- kept for compatibility with the
    # existing FIFO naming scheme that pop_oldest() sorts on; confirm target OS.
    fname = utils.get_project_root() + \
        "/data/raw/%s%s%s.txt" % (str(timestamp), "|", str(from_number))
    with open(fname, "w") as f:
        f.write(data)
    resp = twilio.twiml.Response()
    resp.say("Call recorded successfully")
    return str(resp)
def send_audio_segments_to_async_recognize(channels, sampling_r, access_token):
    """Submit voiced segments of every channel to Google's async recognize.

    Each channel is cut into timestamped intervals; every interval is written
    to a temporary WAV, uploaded to Google Storage and queued for async
    transcription.

    Returns:
        dict mapping "channel<i>" to {operation_id: (start_sec, stop_sec)}.
    """
    results = {}
    for idx, channel in enumerate(channels):
        key = "channel" + str(idx)
        results[key] = {}
        data = read(channel)
        samples = data[1]
        for start, stop in audio_utils.intervals_from_signal(samples, sampling_r):
            segment_path = tempfile.NamedTemporaryFile(
                delete=False, dir=utils.get_project_root() + "/data/tmp/").name
            write(segment_path, sampling_r, samples[start:stop])
            gs_url = copy_file_to_google_storage_and_delete(segment_path)
            operation_id = call_async_recognize(
                generate_speech_api_request_json(gs_url), access_token)
            # remember where this segment sits in the original audio (seconds)
            results[key][operation_id] = (start / float(sampling_r),
                                          stop / float(sampling_r))
    return results
def generate_data(M, params, model_name, stim_amp, stim_duration, num_trials):
    """Run repeated stimulation trials of model ``M`` and pickle each run.

    Each trial: run 10 periods from the stored limit-cycle initial state,
    apply the stimulus at a uniformly random point of the cycle, then run 10
    more periods.  Runs are written to data/runs/<model>_<tag>/.

    Args:
        M: model instance exposing reset/run/set_input and history buffers.
        params: parameter dict (must contain "dt" and "noise_lvl"); stored
            with every run.
        model_name: locates the limit-cycle pickle and names the outputs.
        stim_amp: stimulus amplitude.
        stim_duration: stimulus duration passed to M.run().
        num_trials: number of independent trials to generate.

    Returns:
        None
    """
    root_folder = get_project_root()
    dt = params["dt"]
    noise_lvl = params["noise_lvl"]
    tag = f"{dt}_{noise_lvl}_{stim_duration}_{stim_amp}"
    dir_name = f"{model_name}_{tag}"
    save_to = os.path.join(root_folder, "data", "runs", dir_name)
    create_dir_if_not_exist(save_to)
    file = os.path.join(root_folder, "data", "limit_cycles",
                        f"{model_name}_limit_cycle.pkl")
    # context manager instead of pickle.load(open(...)): no leaked handle
    with open(file, "rb") as fh:
        data_lim_cycle = pickle.load(fh)
    T = data_lim_cycle["T"]
    initial_state = data_lim_cycle["x0"]
    for i in tqdm(range(num_trials)):
        M.reset()
        M.state = initial_state
        # run for 10 periods
        M.run(10 * T)
        # uniformly choose random time throughout the cycle to apply the stimulus
        delta_T = np.random.rand() * T
        M.run(delta_T)
        M.set_input(stim_amp)
        M.run(stim_duration)
        # run for the other 10 periods
        M.set_input(0)
        M.run(10 * T)
        data = dict()
        data['params'] = params
        data['signal'] = np.array(M.state_history).squeeze()[:, 0]
        # data['signal_all'] = np.array(M.state_history).squeeze()
        data['dt'] = M.dt
        data['t'] = np.array(M.t_range).squeeze()
        data['inp'] = np.array(M.input_history).squeeze()
        file_name = os.path.join(save_to, f"{model_name}_data_{i}.pkl")
        # close the output file deterministically so data is flushed per trial
        with open(file_name, "wb") as fh:
            pickle.dump(data, fh)
    return None
import os
import pickle
import numpy as np
from matplotlib import pyplot as plt
from src.plots.plot_prc_experimental import plot_prc_experimental
from src.utils.utils import get_project_root
from src.models.Van_der_Pol_Oscillator import Van_der_Pol_Oscillator
from src.models.Morris_Lecar_Neuron import Morris_Lecar_Neuron
from src.models.Hodgkin_Huxley_Neuron import Hodgkin_Huxley_Neuron
from src.models.Hindmarsh_Rose_Neuron import Hindmarsh_Rose_Neuron
from src.models.Rossler_Oscillator import Rossler_Oscillator

#### plotting exact PRC computed from full dynamics
root_folder = get_project_root()
# model_names = ['Rossler_Oscillator', 'Van_der_Pol_Oscillator',
#                'Morris_Lecar_Neuron', 'Hodgkin_Huxley_Neuron', 'Hindmarsh_Rose_Neuron']
model_names = ['Van_der_Pol_Oscillator']
data_folder = os.path.join(root_folder, "data", "processed_data")
for model_name in model_names:
    modifier = 'linear'  # which prc to plot?
    # parameters identifying the processed-data file to load
    dt = 0.005
    noise_lvl = 0.002
    stim_duration = 5
    stimp_amp = 10
    # NOTE(review): this tag embeds stim_duration*dt (a duration in time
    # units), while generate_data builds its tag from stim_duration directly
    # -- confirm the two conventions agree with the file names on disk.
    tag = f'{dt}_{noise_lvl}_{stim_duration*dt}_{stimp_amp}'
    prc_file = os.path.join(data_folder,
                            f"{model_name}_{modifier}_{tag}_prc.pkl")
    data = pickle.load(open(prc_file, 'rb+'))
    # wrap phases into [0, 2*pi) for plotting
    phi = np.array(data["phi"]) % (2 * np.pi)
    delta_phi = data["delta_phi"]
def get_exact_PRC_qp(model_name):
    """Compute the exact phase-response curve (PRC) of a model by solving
    the adjoint equation along the stored limit cycle as a quadratic program.

    The adjoint system dz/dt = -D^T z is discretised over one period of the
    limit cycle (0.5*dt averaging of neighbouring points, i.e. trapezoidal
    rule) and solved as a least-norm QP via cvxopt, subject to the
    normalisation (F(t), Z(t)) = omega at every time point.

    Args:
        model_name: model whose params JSON and limit-cycle pickle are
            loaded; the model class is instantiated via eval().

    Returns:
        dict with keys "delta_phi", "Z", "phi", "signal", "params", "omega".
    """
    root_folder = get_project_root()
    file = os.path.join(root_folder, "data", "model_params",
                        f"{model_name}_params.json")
    params = json.load(open(file, 'r'))
    # NOTE(review): eval() on the model name is safe only for trusted,
    # internal names -- never expose this to external input.
    model = eval(f"{model_name}(params)")
    root_folder = get_project_root()
    file = os.path.join(root_folder, "data", "limit_cycles",
                        f"{model_name}_limit_cycle.pkl")
    data_lim_cycle = pickle.load(open(file, "rb+"))
    T = data_lim_cycle["T"]
    omega = (2 * np.pi) / T
    signal = data_lim_cycle["limit_cycle"]
    dt = data_lim_cycle["dt"]
    num_points = data_lim_cycle["num_points"]
    x0 = data_lim_cycle["x0"]
    # params from the pickle supersede the JSON params loaded above
    params = data_lim_cycle["params"]
    t = np.arange(num_points + 1) * dt
    phase = t * omega
    T = data_lim_cycle["T"]
    N = signal.shape[1]  # number of ``channels''
    M = signal.shape[0]  # time points
    # dz / dt = -D^T z is approximated by
    # z_{n+1} - z_{n} + 0.5 dt D^T(x_{n+1})Z_{n+1} + 0.5 dt D^T(x_{n})Z_{n} = 0
    # C z = 0, where z has the shape N * M - vector of Z_{n}
    # C = (-I + 0.5 dt D^T(x_n)) + cyclically shifted up by one row (I + 0.5 dt D^T(x_{n+1}))
    # C has (N * M, N*M) dimensions
    # and the constraints Bz = omega
    # associated d phi/ dt = (F(t), Z(t)) = w
    # B is made out from F^T(x_{n}) and has (M, N*M) dimensions
    C_1 = np.zeros((N * M, N * M))
    C_2 = np.zeros((N * M, N * M))
    B = np.zeros((M, N * M))
    for t_i in range(M):
        # block row for time point t_i
        i_start = N * t_i
        i_finish = N * (t_i + 1)
        C_1[i_start:i_finish, i_start:i_finish] = -np.eye(N) + 0.5 * dt * model.jac_rhs(signal[t_i, :]).T
        # (t_i + 1) % M closes the cycle periodically at the last point
        C_2[i_start:i_finish, i_start:i_finish] = np.eye(N) + 0.5 * dt * model.jac_rhs(signal[(t_i + 1) % M, :]).T
        B[t_i, i_start:i_finish] = model.rhs_(signal[t_i, :])
    C = C_1 + np.roll(C_2, N, axis=1)
    # # Least norm solution (no hard constraints) using solver
    alpha = 1e-9
    solvers.options['show_progress'] = True
    # (Cx - h)^T @ (Cx - h) = x^T C^T C x - 2 h^T C x + h^T h
    P = C.T @ C
    q = np.zeros(N * M)
    sol = solvers.qp(P=matrix(P), q=matrix(q), A=matrix(B), b=matrix(omega * np.ones(M)))
    x = np.array(sol['x'])
    Z = x.reshape(M, N)
    prc_exact_data = dict()
    # first state dimension of the adjoint solution is the PRC itself
    prc_exact_data["delta_phi"] = Z[:, 0]
    prc_exact_data["Z"] = Z
    prc_exact_data["phi"] = phase
    prc_exact_data["signal"] = signal
    prc_exact_data["params"] = params
    prc_exact_data["omega"] = omega
    return prc_exact_data
def test_get_project_root(self):
    """The project root directory must be named ``japan_horse_racing``."""
    root = utils.get_project_root()
    assert basename(root) == "japan_horse_racing"
def load_logs(pocket: Pocket):
    """Load the debug log and split it into one string per log entry.

    Entries are delimited by timestamps starting with ``[2021-``; the list of
    entries is cached in the pocket under this module's name.

    Args:
        pocket: shared Pocket container used as a cache.

    Returns:
        list of log-entry strings.
    """
    with open(get_project_root() / 'logs' / 'logs_debug.log', 'r') as f:
        text = f.read()
    # TODO: the '[2021-' delimiter is hard-coded to a single year.
    parts = text.split('[2021-')
    # Re-attach the delimiter to every real entry.  The chunk before the
    # first timestamp is not an entry and must not get the prefix (the
    # original code also prefixed parts[0], fabricating a bogus '[2021-'
    # entry, and doubled every newline by '\n'.join-ing readlines() output).
    entries = ['[2021-' + part for part in parts[1:]]
    if parts[0]:
        entries.insert(0, parts[0])  # keep any preamble as-is
    pocket.get(__name__)['log'] = entries
    return entries
import os
import json
from src.utils import utils
from glob import glob
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from src import transcribe_url_async_gs
from src import html_from_transcript
from dateutil.parser import parse

# credentials for the notification account, loaded once at import time
email_file = utils.get_project_root() + "/src/credentials/email_account.json"
with open(email_file) as fh:
    email_account = json.loads(fh.read())


def pop_oldest(dir, delete=False):
    """Return (and optionally delete) the oldest raw json record (FIFO).

    "Oldest" is the lexicographically smallest *.txt name, which works
    because file names start with an ISO timestamp.

    Args:
        dir: directory to scan (note: parameter shadows the builtin ``dir``;
            name kept for caller compatibility).
        delete: when True, remove the file after reading it.

    Returns:
        (basename, parsed_json) of the oldest file.

    Raises:
        IndexError: if the directory contains no *.txt files.
    """
    f = sorted(glob(dir + "/*.txt"), reverse=False)[0]
    fname = os.path.basename(f)
    # close the handle deterministically instead of relying on GC
    with open(f) as fh:
        j = json.loads(fh.read())
    if delete:
        os.remove(f)
    return fname, j


def send_user_email(html, subj, dest):
    # send just html to user
    fromaddr = email_account["address"]
    msg = MIMEMultipart()
    msg['From'] = email_account["account_name"]
    msg['To'] = dest
def get_exact_PRC_fourier_qp(model_name, N_fourirer_components):
    """Compute the exact PRC by expanding the adjoint solution in a truncated
    Fourier basis and solving the resulting least-squares QP via cvxopt.

    Z(t) is parametrised as chi(t) @ x, where chi stacks the basis functions
    (constant, cos and sin up to ``N_fourirer_components`` harmonics)
    Kronecker-multiplied with I_K; the residuals of the adjoint ODE and of
    the normalisation (F, Z) = omega are minimised jointly.

    Args:
        model_name: model whose params JSON / limit-cycle pickle are loaded;
            the model class is instantiated via eval().
        N_fourirer_components: number of Fourier harmonics (sic -- name kept
            for caller compatibility).

    Returns:
        dict with keys "delta_phi", "Z", "phi", "signal", "params", "omega".
    """
    root_folder = get_project_root()
    file = os.path.join(root_folder, "data", "model_params",
                        f"{model_name}_params.json")
    params = json.load(open(file, 'r'))
    # NOTE(review): eval() on the model name -- trusted internal names only.
    model = eval(f"{model_name}(params)")
    root_folder = get_project_root()
    file = os.path.join(root_folder, "data", "limit_cycles",
                        f"{model_name}_limit_cycle.pkl")
    data_lim_cycle = pickle.load(open(file, "rb+"))
    T = data_lim_cycle["T"]
    omega = (2 * np.pi) / T
    signal = data_lim_cycle["limit_cycle"]
    dt = data_lim_cycle["dt"]
    num_points = data_lim_cycle["num_points"]
    x0 = data_lim_cycle["x0"]
    # params from the pickle supersede the JSON params loaded above
    params = data_lim_cycle["params"]
    t = np.arange(num_points + 1) * dt
    phase = t * omega
    # # Least norm solution (no hard constraints) using solver
    N = signal.shape[0]  # time points
    K = signal.shape[1]  # dimensionality of the system
    M = N_fourirer_components  # N fourier components

    def chi(omega, t, K, M):
        # Fourier basis row [1, cos(m w t)..., sin(m w t)...] (x) I_K
        vect = [1]
        for m in range(M):
            vect.append(np.cos(omega * t * (m+1)))
        for m in range(M):
            vect.append(np.sin(omega * t * (m+1)))
        vect = np.array(vect)
        return np.kron(vect, np.eye(K))

    def kappa(omega, t, K, M):
        # time derivative of the chi basis row
        vect = [0]
        for m in range(M):
            vect.append(-(m+1) * omega * np.sin(omega * t * (m+1)))
        for m in range(M):
            vect.append((m+1) * omega * np.cos(omega * t * (m+1)))
        vect = np.array(vect)
        return np.kron(vect, np.eye(K))

    # F_matrix: rows of F^T(x_n); D_matrix: block-diagonal Jacobians (transposed)
    F_matrix = np.zeros((N, K * N))
    for i in range(N):
        F_matrix[i, i * K : (i+1) * K] = model.rhs_(signal[i, :])
    D_matrix = np.zeros((N*K, N*K))
    for i in range(N):
        D_matrix[i * K : (i+1) * K, i * K : (i+1) * K] = model.jac_rhs(signal[i, :]).T
    # stack the basis (and its derivative) evaluated at every time point
    kappa_matrix = np.vstack([kappa(omega, t[i], K, M).reshape(-1, K*(2*M+1)) for i in range(N)])
    chi_matrix = np.vstack([chi(omega, t[i], K, M).reshape(-1, K*(2*M+1)) for i in range(N)])
    # adjoint-ODE residual operator P x ~ 0 and normalisation residual Q x ~ omega
    P = kappa_matrix + D_matrix @ chi_matrix
    Q = F_matrix @ chi_matrix
    G = np.vstack([Q, P])
    h = np.hstack([omega * np.ones(N), np.zeros(N*K)])
    q = -(h.reshape(1, -1) @ G).squeeze()
    solvers.options['show_progress'] = True
    # regularised least squares: min ||G x - h||^2 + 1e-9 ||x||^2
    sol = solvers.qp(P=matrix(G.T @ G + 1e-9 * np.eye(K*(2*M+1))), q=matrix(q))
    solvers.options['show_progress'] = True
    x = np.array(sol['x'])
    # reconstruct Z(t) on the time grid from the Fourier coefficients
    Z = (chi_matrix @ x).reshape(-1, K)
    prc_exact_data = dict()
    prc_exact_data["delta_phi"] = Z[:, 0]
    prc_exact_data["Z"] = Z
    prc_exact_data["phi"] = phase
    prc_exact_data["signal"] = signal
    prc_exact_data["params"] = params
    prc_exact_data["omega"] = omega
    return prc_exact_data
import tempfile import requests import sys import json import subprocess import argparse from src.utils import audio_utils from src.utils import service_utils from src.utils import utils import os import time from scipy.io.wavfile import read from scipy.io.wavfile import write # "https://cloud.google.com/speech/docs/getting-started" key_file = utils.get_project_root() + "/src/credentials/google_cloud_key.json" key_obj = json.loads(open(key_file).read()) proj_id = str(key_obj["project_id"]) def copy_file_to_google_storage_and_delete(f): subprocess.check_output("sudo gsutil cp %s gs://%s.appspot.com/" % (f, proj_id), shell=True) os.remove(f) return "gs://" + str(proj_id) + "s.appspot.com/" + os.path.basename(f) def generate_speech_api_request_json(uri): request_json = { 'config': {