def getData(self, filepath):
    octave.eval(
        "load('" + filepath +
        "', '-mat')")  # TODO: separate Octave instance for each user
    data = octave.pull('Data')
    # TODO: error checking + memoization
    return data
Example 2
    def setUp(self):
        octave.restart()
        octave.addpath('./octave')
        octave.addpath('./test/octave')
        octave.eval('pkg load signal')  # load signal package

        # Test data
        v = 1/np.sqrt(2)

        self.in1 = np.array([1, 0, 0, 0, 0, 0, 0, 0], dtype=np.complex64)
        self.out1 = np.array([1, 1, 1, 1, 1, 1, 1, 1], dtype=np.complex64)

        self.in2 = np.array([0, 1, 0, 0, 0, 0, 0, 0], dtype=np.complex64)
        self.out2 = np.array([1, v-1j*v, -1j, -v-1j*v, -1, -v+1j*v, 1j, v+1j*v], dtype=np.complex64)

        self.in3 = np.array([0, 0, 1, 0, 0, 0, 0, 0], dtype=np.complex64)
        self.out3 = np.array([1, -1j, -1, 1j, 1, -1j, -1, 1j], dtype=np.complex64)

        self.in4 = np.array([0, 0, 0, 1, 0, 0, 0, 0], dtype=np.complex64)
        self.out4 = np.array([1, -v-1j*v, 1j, v-1j*v, -1, v+1j*v, -1j, -v+1j*v], dtype=np.complex64)

        self.in5 = np.array([0, 0, 0, 0, 1, 0, 0, 0], dtype=np.complex64)
        self.out5 = np.array([1, -1, 1, -1, 1, -1, 1, -1], dtype=np.complex64)

        self.in6 = np.array([0, 0, 0, 0, 0, 1, 0, 0], dtype=np.complex64)
        self.out6 = np.array([1, -v+1j*v, -1j, v+1j*v, -1, v-1j*v, 1j, -v-1j*v], dtype=np.complex64)

        self.in7 = np.array([0, 0, 0, 0, 0, 0, 1, 0], dtype=np.complex64)
        self.out7 = np.array([1, 1j, -1, -1j, 1, 1j, -1, -1j], dtype=np.complex64)

        self.in8 = np.array([0, 0, 0, 0, 0, 0, 0, 1], dtype=np.complex64)
        self.out8 = np.array([1, v+1j*v, 1j, -v+1j*v, -1, -v-1j*v, -1j, v-1j*v], dtype=np.complex64)
Example 3
    def test_findpeaks_callable(self):
        """ Check we can call the Octave findpeaks from Python using oct2py. """
        # Load signal package
        octave.eval("pkg load signal")
        (pks, loc) = octave.findpeaks(np.array([0, 2, 4, 9, 5, 3, 6, 11, 5, 1, 6]))
        self.assertEqual(pks[0].tolist(), [11, 9])
        self.assertEqual(loc[0].tolist(), [8, 4])
Example 4
    def set_data_matlab(self, bind: str = None) -> None:
        """
            Get data from matlab file format
        """
        try:
            oct.eval("cd .")
            self._raw_data = oct.feval(bind)
        except Exception:
            print("Please install oct2py and its dependencies if you want to use set_data_matlab()")
            raise
        
        self._network = Network()
        buses = []
        for i in range(0, self._raw_data["bus"].shape[0]):
            buses.append(Bus(data = self._raw_data["bus"][i]))

        ### merge "gen" and "gencost" matrix 
        self._raw_data["all_gen"] = np.concatenate((self._raw_data["gen"],self._raw_data["gencost"]),axis=1)

        ### Adding generators
        for i in range(0, self._raw_data["gen"].shape[0]):
            buses[int(self._raw_data["gen"][i][0] - 1)].add_generator(data = self._raw_data["all_gen"][i])
        
        ### Adding buses
        self._network.add_buses(buses)

        ### Adding branches
        for i in range(0,self._raw_data["branch"].shape[0]):
            self._network.add_branch(Branch(self._raw_data["branch"][i]))
Example 5
    def setUp(self):
        octave.restart()
        octave.addpath('./octave')
        octave.addpath('./test/octave')
        octave.eval('pkg load signal')  # load signal package

        # Test data
        self.in1 = np.ones(8, dtype=np.complex64)
        self.out1 = np.copy(self.in1)
Example 6
def test_stoi_upsample():
    """ Test STOI at sampling frequency above 10 kHz. """
    for fs in [8000]:
        x = np.random.randn(2 * fs)
        y = np.random.randn(2 * fs)
        octave.eval('pkg load signal')
        stoi_out = stoi(x, y, fs)
        stoi_out_m = octave.feval('octave/stoi.m', x, y, float(fs))
        assert_allclose(stoi_out, stoi_out_m, atol=ATOL, rtol=RTOL)
Example 7
def test_stoi_downsample():
    """ Test STOI at sampling frequency below 10 kHz. """
    for fs in [11025, 16000, 22050, 32000, 44100, 48000]:
        x = np.random.randn(2 * fs)
        y = np.random.randn(2 * fs)
        octave.eval('pkg load signal')
        stoi_out = stoi(x, y, fs)
        stoi_out_m = octave.feval('octave/stoi.m', x, y, float(fs))
        assert_allclose(stoi_out, stoi_out_m, atol=ATOL, rtol=RTOL)
Example 8
    def setUp(self):
        octave.restart()
        octave.addpath('./octave')
        octave.addpath('./test/octave')
        octave.eval('pkg load signal')  # load signal package

        # Test data
        N = 64
        self.in1 = _create_sine(N)
        self.out1 = np.squeeze(octave.fft(self.in1, N))
Example 9
    def setUp(self):
        octave.restart()
        octave.addpath('./octave')
        octave.addpath('./test/octave')
        octave.eval('pkg load signal')  # load signal package

        # Test data
        self.in1 = np.array([1, 0], dtype=np.complex64)
        self.out1 = np.array([1, 1], dtype=np.complex64)

        self.in2 = np.array([0, 1], dtype=np.complex64)
        self.out2 = np.array([1, -1], dtype=np.complex64)
def start_octave():
    print("\nStarting Octave...")
    print("------------------")
    start_time = time.time()

    from oct2py import octave
    octave.cd("./SSC_ADMM_v1.1")
    octave.eval("svdDriversCompare")

    print("Elapsed: {0:.2f} sec".format(time.time() - start_time))

    return octave
Example 11
    def stoi(self, filepath, clean_filepath=None):
        # filepath = path to mashup
        # Needs Octave and octave-signal installed
        # Use "pip install oct2py" to install the Python-Octave bridge
        # STOI assumes
        # * a sampling rate of 10 kHz, resamples otherwise
        # * a window length of 384 ms
        # * 15 third-octave bands over the full frequency range
        # * overlapping segments with a Hanning window
        # * silent frames are removed
        import librosa
        from oct2py import octave
        if clean_filepath is None:
            # No clean file given.
            # Get processed and clean file from mashup.
            vocal_isolation = VocalIsolation(config)
            vocal_isolation.loadWeights(config.weights)
            audio, sampleRate = conversion.load_audio_file(filepath)
            spectrogram = conversion.audio_file_to_spectrogram(
                audio, fftWindowSize=config.fft,
                learn_phase=self.config.learn_phase)

            normalizer = Normalizer()
            normalize = normalizer.get(both=False)
            denormalize = normalizer.get_reverse()

            # normalize
            spectrogram, norm = normalize(spectrogram)

            info = vocal_isolation.process_spectrogram(spectrogram,
                                                       config.get_channels())
            spectrogram, new_spectrogram = info
            # de-normalize
            new_spectrogram = denormalize(new_spectrogram, norm)

            processed = conversion.spectrogram_to_audio_file(new_spectrogram,
                                                             config.fft,
                                                             config.phase_iterations)

            clean_filepath = filepath.replace("_all.wav", "_vocal.wav")
            clean, sampling_rate = librosa.load(clean_filepath)
        else:
            # A clean file is given.
            # Compare it with the processed audio.
            processed, sampling_rate = librosa.load(filepath)
            clean, sampling_rate = librosa.load(clean_filepath)

        # Make sure the original and processed audio have the same length
        clean = clean[:processed.shape[0]]

        octave.eval("pkg load signal")
        d = octave.stoi(clean, processed, sampling_rate)
        self._write("stoi: %f" % d)
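For comparison, a minimal sketch of the same score computed without the Octave round trip, assuming the pure-Python pystoi package (presumably the stoi used in the test examples above) is installed; the file paths are placeholders:

import librosa
from pystoi import stoi as py_stoi

clean, fs = librosa.load("some_vocal.wav", sr=None)        # placeholder paths
processed, _ = librosa.load("some_processed.wav", sr=None)
clean = clean[:processed.shape[0]]                          # match lengths, as above
score = py_stoi(clean, processed, fs, extended=False)
print("stoi: %f" % score)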
Example 12
def eval_reg():
    print('\nDemo3.\nEval octave regression toolkit')
    # pkg is the package manager of octave like apt in ubuntu
    octave.eval('pkg load statistics')
    x = [143, 145, 146, 147, 149, 150, 153, 154, 155, 156, 157, 158, 159, 160, 162, 164]
    Y = [88, 85, 88, 91, 92, 93, 93, 95, 96, 98, 97, 96, 98, 99, 100, 102]
    yb, k = octave.regression(x, Y, nout=2)
    if PY2:
        print('yb={}, k={}'.format(yb, k))
    else:
        eval("print(f'yb={yb}, k={k}')")
    print('Running script call_regression')
    octave.eval('call_regression')
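If the statistics package is missing, it can be installed from Octave Forge first; a minimal sketch (assumes network access and that Octave can build Forge packages):

from oct2py import octave
octave.eval("pkg install -forge statistics")  # one-time download/build from Octave Forge
octave.eval("pkg load statistics")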
Example 13
def test_resample():
    """ Compare Octave and SciPy resampling.
    Both packages use polyphase resampling with a Kaiser window. We use
    the window designed by Octave in the SciPy resampler."""
    RTOL = 1e-4
    for fs in [8000, 11025, 16000, 22050, 32000, 44100, 48000]:
        x = np.random.randn(2 * fs)
        octave.eval('pkg load signal')
        x_m, h = octave.resample(x, float(FS), float(fs), nout=2)
        h = np.squeeze(h)
        x_m = np.squeeze(x_m)
        x_r = resample_oct(x, FS, fs)
        assert_allclose(x_r, x_m, atol=ATOL, rtol=RTOL)
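For reference, a minimal sketch of polyphase resampling with a Kaiser window using only SciPy; resample_oct above is assumed to be a project helper, and the beta value here is an arbitrary illustration rather than the window Octave actually designs:

from fractions import Fraction
import numpy as np
from scipy.signal import resample_poly

def resample_kaiser(x, fs_new, fs_old, beta=5.0):
    # Reduce the rate change to an integer up/down ratio for the polyphase filter.
    ratio = Fraction(fs_new, fs_old).limit_denominator()
    return resample_poly(x, ratio.numerator, ratio.denominator,
                         window=('kaiser', beta))

x = np.random.randn(2 * 8000)
x_10k = resample_kaiser(x, 10000, 8000)  # e.g. upsample 8 kHz -> 10 kHz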
Example 14
    def calculateModelScore(self, model, averages):
        fhin = tempfile.NamedTemporaryFile(mode='w',
                                           prefix='fludetector-matlab-input.')
        fhout = tempfile.NamedTemporaryFile(
            prefix='fludetector-matlab-output.')
        fhin.write('\n'.join('%s,%f' % a for a in averages))
        fhin.flush()
        octave.eval(
            "%s('%s','%s')" %
            (model.get_data()['matlab_function'], fhin.name, fhout.name))
        value = float(open(fhout.name).read().strip())
        fhin.close()
        fhout.close()
        return value
Example 15
def plot_calibration_mapping(calibration_model,
                             min_score,
                             max_score,
                             resolution=1000,
                             file_name='calibration_mapping.png'):
    # Function for plotting what probabilities different scores get mapped to.
    # "General purpose prediction function"
    # PERHAPS ADD PROBABILITY DISTRIBUTION OF TRAINING DATA (OR TESTING?)?
    # WOULD INDICATE HOW MANY SAMPLES FALL INTO ONE BIN.
    diff = max_score - min_score
    scores = [
        min_score + i * diff / float(resolution) for i in range(resolution + 1)
    ]
    try:  # IR model
        probabilities = calibration_model.predict(scores)
    except:
        try:  # ENIR
            import rpy2.robjects as robjects
            from rpy2.robjects.packages import importr
            enir = importr('enir')
            r = robjects.r
            # Automatic conversion of numpy arrays to R-vectors
            import rpy2.robjects.numpy2ri
            rpy2.robjects.numpy2ri.activate()
            # ENIR-MODEL MIGHT NEED TO BE PUSHED TO R-ENVIRONMENT?
            probabilities = enir.enir_predict(calibration_model,
                                              robjects.FloatVector(scores))
            probabilities = np.array(probabilities)
        except:
            try:  # BBQ
                from oct2py import octave
                octave.eval("addpath('./calibration/BBQ/')", verbose=False)
                octave.push('scores', scores, verbose=False)
                octave.push('calibration_model',
                            calibration_model,
                            verbose=False)
                octave.eval(
                    'probabilities = predict(calibration_model, scores, 1)',
                    verbose=False)
                probabilities = octave.pull('probabilities', verbose=False)
                probabilities = np.array([item[0] for item in probabilities])
            except:
                pass  # Continue with BIR and WABIR? RCIR?
    # Plot score vs. probability:
    plt.plot(scores, probabilities)
    plt.title("Calibration mapping")
    plt.savefig(file_name)
    plt.gcf().clear()
Example 16
    def fit(self, K, y):
        """Learn a low-rank kernel approximation.

        :param K: (``numpy.ndarray``) or (``Kinterface``). The kernel to be approximated with G.

        :param y: (``numpy.ndarray``) Class labels :math:`y_i \in \{-1, 1\}` or regression targets.
        """

        # Convert to explicit form
        K = K[:, :]
        y = y.reshape((len(y), 1))

        # Call original implementation
        octave.push(["K", "y", "rank", "centering", "kappa", "delta", "tol"], [
            K, y, self.rank, self.centering, self.kappa, self.delta, self.tol
        ])
        octave.eval(
            "[G, P, Q, R, error1, error2, error, predicted_gain, true_gain] = csi(K, y, rank, centering, kappa, delta, tol)",
            verbose=False)
        G, P, Q, R, error1, error2, error, predicted_gain, true_gain = \
            octave.pull(["G", "P", "Q", "R", "error1", "error2", "error", "predicted_gain", "true_gain"])
        R = np.atleast_2d(np.array(R))

        # Octave indexes from 1
        P = P.ravel().astype(int) - 1

        # Resort rows to respect the order
        n, k = K.shape[0], self.rank
        self.I = self.active_set_ = list(P[:k])

        Go = np.zeros((n, k))
        Qo = np.zeros((n, k))
        Ro = np.zeros((k, k))
        km = min(k, G.shape[1])
        Go[P, :km] = G[:, :km]
        Qo[P, :km] = Q[:, :km]
        Ro[:km, :km] = R[:km, :km]
        self.G = Go[:, :self.rank]
        self.P = P[:self.rank]
        self.Q = Qo[:, :]
        self.R = Ro[:, :self.rank]
        self.error1 = error1
        self.error2 = error2
        self.error = error
        self.predicted_gain = predicted_gain
        self.true_gain = true_gain
        self.trained = True
        self.active_set_ = self.I[:self.rank]
Example 17
def dense_sift(image, fraction=1.0):
    """ dense SIFT
        use VLFEAT vl_phow through octave; expects a grayscale image
    """
    octave.push("im", image)
    octave.eval("im = single(im);")
    octave.eval("[kp,siftd] = vl_phow(im); ")
    descriptors = octave.pull("siftd")

    # flip from column-major to row-major
    descriptors = descriptors.T

    if fraction < 1.0:
        descriptors = random_sample(descriptors, fraction)

    return descriptors
Example 18
def calc(api, cmd):
    global rs
    rs = ""
    plot = "plot"

    try:
        result = octave.eval(cmd['params'],
                             stream_handler=stream_handler,
                             plot_width=1200,
                             plot_height=600,
                             plot_dir=".",
                             plot_format='PNG',
                             plot_name=plot)
    except Exception as e:
        rs = str(e)

    reply = "@" + cmd['from'] + "\n" + rs
    file = plot + '001.PNG'

    try:
        j = api.upload_media(open(file, 'rb'), 'image/png').json()
        api.post_comment(cmd['id'], reply, attachment=j['guid'])
    except:
        api.post_comment(cmd['id'], reply)

    for filename in os.listdir('.'):
        if re.search(r'\.png$', filename, re.IGNORECASE):
            os.remove(filename)
    def get_dim(loop):
        octave.eval('pkg load image')
        octave.addpath(os.path.abspath('matlab/hausdorff'))

        categories = [
            'H0', 'HDE', 'HRE', 'QE', 'IAE', 'ISE', '\u03C3 Huber',
            '\u03B2 Laplace', '\u03B3', '\u03C3 Gauss', '\u03B1', 'H3', 'H2',
            'H1'
        ]
        values = [
            loop.h0,
            np.abs(loop.minHde / loop.hde), loop.minHre / loop.hre,
            loop.minQe / loop.qe, loop.minIae / loop.iae,
            loop.minIse / loop.ise, loop.minRsig / loop.rsig,
            loop.minLb / loop.lb, loop.minSgam / loop.sgam,
            loop.minGsig / loop.gsig, loop.salf - 1, loop.h3, loop.h2, loop.h1,
            loop.h0
        ]
        N = len(categories)
        angles = [n / float(N) * 2 * np.pi for n in range(N)]
        angles += angles[:1]
        ax = plt.subplot(polar=True)
        ax.set_rlabel_position(0)
        ax.spines['polar'].set_visible(False)
        ax.grid(visible=False)
        plt.xticks(visible=False)
        plt.yticks(visible=False)
        plt.ylim(0, 1)
        ax.set_theta_zero_location('N')
        ax.plot(angles, values, color='black')
        ax.fill(angles, values, color='black')

        filename = f'radar_{loop.id}.png'
        plt.savefig(filename, format='png')
        plt.close()
        result = octave.hausDim(filename)
        avg = np.average([float(item) for item in values[:-1]])
        os.remove(filename)

        return result, result * avg
Example 20
def sparse_sift(image, fraction=1.0):
    """ sparse oriented SIFT at Harris-LaPlace and Difference of Gaussians keypoints 
        use VLFEAT vl_covdet through octave; expects a grayscale image
    """
    octave.push("im", image)
    octave.eval("im = single(im);")
    octave.eval(
        "[kp,sift_hl] = vl_covdet(im, 'method', 'HarrisLaplace', 'EstimateOrientation', true); "
    )
    octave.eval(
        "[kp,sift_dog] = vl_covdet(im, 'method', 'DoG', 'EstimateOrientation', true); "
    )
    octave.eval("descrs = [sift_hl, sift_dog];")
    descriptors = octave.pull("descrs")

    # flip from column-major to row-major
    descriptors = descriptors.T

    if fraction < 1.0:
        descriptors = random_sample(descriptors, fraction)

    return descriptors
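A hypothetical usage sketch combining the two extractors above (assumes VLFeat is on the Octave path; the random array stands in for a real grayscale image):

import numpy as np

gray = np.random.rand(64, 64).astype(np.float32)  # stand-in for a grayscale image
dense = dense_sift(gray, fraction=0.5)            # keep half of the dense descriptors
sparse = sparse_sift(gray)
descriptors = np.vstack([dense, sparse])          # one row per descriptor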
Example 21
from oct2py import octave
from numpy import matrix
from numpy import linalg
from numpy import ma
from numpy import sum
# Script outputs the number of possible letter paths on a Boggle board
# (arbitrary-precision integer arithmetic is nice here)
octave.addpath('.')
octave.eval('boggleAdjentMatrix')
AdjentMatrix = octave.pull('boggleAdj')
AdjentMatrix = matrix(AdjentMatrix)

SumAdjentMatrix = matrix(ma.zeros((16,16),dtype=int))

for n in range(2,16): #len(word) >= 3 and len(word) == len(path+1)
	SumAdjentMatrix+=AdjentMatrix**n
	
NPaths = sum(SumAdjentMatrix)
print(NPaths)
# mesh.save('mesh.stl')
mlab.show()
cube = mesh.Mesh(np.zeros(hull.simplices.shape[0], dtype=mesh.Mesh.dtype))
# cube = mesh.Mesh(np.zeros((myvolume2.shape), dtype=mesh.Mesh.dtype))
for i, f in enumerate(hull.simplices):
    for j in range(3):
        cube.vectors[i][j] = hull.points[f[j], :]

# Write the mesh to file "cube.stl"
cube.save(
    'D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation\cube2.stl')
############################ Running MATLAB scripts in Python ############################
from oct2py import octave as oct
# octave = oct.oct2py('D:\Octave\Octave-5.1.0.0\mingw64\\bin\octave-cli.exe')
import os
oct.eval("cd D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation")
cwd = os.getcwd()
oct.addpath(cwd)
oct.addpath('D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation')
oct.feval('VOXELISE_example_function', 'cube2.stl', rmax2 - rmin2,
          cmax2 - cmin2, zmax2 - zmin2)
# oct.feval('VOXELISE_example_function','cube2.stl',100,100,100)
oct.eval("cd D:\matlab_useful_codes\Mesh_voxelisation")
# oct.eval("save -v7 myworkspace.mat")
from scipy.io import loadmat
D = loadmat(
    "D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation\myworkspace.mat"
)
print(D.keys())
######## Reading the .mat matrix, converting it to a numpy array, and saving it as a .nrrd image using SimpleITK
# z=sio.loadmat('test_voxel.mat')
Example 23
import csv

np.set_printoptions(formatter={'float': lambda x: "{0:0.3f}".format(x)})
np.set_printoptions(precision=3)

N_x = 20
N_y = 20
N_2 = N_x * N_y

###################################################
# LOAD REFERENCE

# Read file
from oct2py import octave as oct
oct.addpath("res/RANDOM_single_region/500_2000_250000")  #REFERENCE
oct.eval("serp_full_core_det0")
oct.eval("save -v7 saved_rates.mat")

from scipy.io import loadmat
rates = loadmat("saved_rates.mat")

# Plot reaction rates on 2D RZ plane
rr_ref = rates['DET1'][:, -2][:N_2].reshape((N_2, 1))
rr_ref_plot = rr_ref.reshape(N_x, N_y)

# Print coordinates
r_vec = rates['DET1R'][:, -1].reshape((N_x, 1))
z_vec = rates['DET1Z'][:, -1].reshape((N_y, 1))
r_vec /= 100
z_vec /= 100
Example 24
# NAEINI MODEL AS NO CV IS REQUIRED.
# The script can also be used to find the RCIR-model that corresponds to the 'd' value
# of a Naeini model. In this case, both models are trained on the same data as the
# RCIR does not need a validation set in the context. Performance is then compared
# on a separate testing set.

import isotonic
from sklearn.isotonic import IsotonicRegression
# from sklearn.neighbors import KNeighborsClassifier
import numpy as np
from scipy.interpolate import interp1d
from oct2py import octave
from sklearn.metrics import roc_auc_score
# Enable octave to find Naeini's functions!
# (download from https://github.com/pakdaman/calibration)
octave.eval("addpath('./calibration/BBQ/')", verbose=False)

# Set test parameters:
dataset = input("Select dataset to run experiment on (1, 2, or 3): ")
n_iterations = input("Set number of iterations (30 used in paper): ")
# metric = input("Select metric ('mse' or 'auc_roc'): ")  # Perhaps allow only auc-roc and exclude mse?
metric = 'auc_roc'  # Can also be set to 'mse' to run the algorithm with mse-based bin merges.
# reshuffle = input("Shuffle data? (y/n): ")  # Should be shuffled at least once. Perhaps remove option.
# It seems that there is no convenient way of estimating the credible intervals, not to speak of the maximum
# credible intervals for the Naeini procedure. Hence the 'Naeini vs. RCIR with d set by Naeini vs. IR'
# is pointless.
# model_comparison = input("Naeini vs. RCIR-CV ('Naeini vs. RCIR-CV') or Naeini vs. RCIR? ")
model_comparison = 'Naeini vs. RCIR-CV'  # Set to anything else, it will run Naeini vs. RCIR with d set by the Naeini model.
# model_comparison = 'Naeini vs. RCIR with d set by Naeini vs. IR'
naeini_metrics = []
rcir_better_than_naeini_metrics = []
Example 25
# ==== Importations
from __future__ import print_function
import math
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats
import sys

import matplotlib.gridspec as gridspec  # subplots with different sizes
from matplotlib.ticker import NullFormatter  # matrices with margins
from joblib import Parallel, delayed

try:
    from oct2py import octave
    octave.eval(
        "addpath(genpath('/home/maxime/compressed-sensing/3_code/SPIRALTAP/'))"
    )
    octa = True
except Exception:
    octa = False

from sklearn.linear_model import Lasso
from sklearn.linear_model import Ridge
import pySPIRALTAP
import pyCSalgos.BP.l1eq_pd


# ==== Measure & reconstruct
def measure(data, basis, gaussian=0, poisson=0):
    """Function computes the dot product <x,phi>
    for a given measurement basis phi
Example 26
import os
import subprocess
import csv
import numpy
import matplotlib.pyplot as plt
import math
from numpy import vstack
from numpy import zeros
from oct2py import octave
octave.addpath('./scripts/')

octave.eval("collisionPDF");
Example 27
def index():
    octave.eval('x = struct("y", {1, 2}, "z", {3, 4});')
    x = octave.pull('x')
    return str(x[0, 1].z)
Example 28
def find_PPG_peaks(vec):
    cur = np.array(vec)
    octave.eval("pkg load signal")
    peaks, indexes = octave.findpeaks(cur, 'DoubleSided', 'MinPeakHeight', 1000, 'MinPeakDistance', 50, 'MinPeakWidth', 0)
    return indexes
Example 29
mlab.triangular_mesh(points[:, 0], points[:, 1], points[:, 2], hull.simplices)
# mesh.save('mesh.stl')
mlab.show()
cube = mesh.Mesh(np.zeros(hull.simplices.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(hull.simplices):
    for j in range(3):
        cube.vectors[i][j] = hull.points[f[j], :]

# Write the mesh to file "cube.stl"
cube.save('cube2.stl')
############################ Running MATLAB scripts in Python ############################
from oct2py import octave as oct
# octave = oct.oct2py('D:\Octave\Octave-5.1.0.0\mingw64\\bin\octave-cli.exe')
import os

oct.eval("cd D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation")
cwd = os.getcwd()

oct.addpath(cwd)
oct.addpath('D:\matlab_useful_codes\Mesh_voxelisation\Mesh_voxelisation')
oct.eval("VOXELISE_example")
oct.eval("cd D:\matlab_useful_codes\Mesh_voxelisation")
oct.eval("save -v7 myworkspace.mat")

from scipy.io import loadmat
D = loadmat("D:\matlab_useful_codes\Mesh_voxelisation\myworkspace.mat")
print(D.keys())

######## Reading the .mat matrix, converting it to a numpy array, and saving it as a .nrrd image using SimpleITK
# z=sio.loadmat('test_voxel.mat')
z2 = D['OUTPUTgrid']
#os.environ["FLASK_APP"] = inspect.getfile(inspect.currentframe())
os.environ[
    "OCTAVE_EXECUTABLE"] = "C:\\Octave\\Octave-4.2.2\\bin\\octave-cli.exe"

from oct2py import octave
from flask import Flask
from dash import Dash
from dash.dependencies import Input, Output
import dash_html_components as html
import dash_core_components as dcc

flask_app = Flask(__name__)
dash_app = Dash(__name__, server=flask_app)

filepath = 'C:\\Users\\SEC\\Documents\\Alperia\\HydroptModel\\vsm.mod'
octave.eval("load('" + filepath + "', '-mat')")
globalData = octave.pull('Data')

dash_app.layout = html.Div(id='page-content',
                           children=[dcc.Location(id='url', refresh=False)])


@dash_app.callback(Output('page-content', 'children'),
                   [Input('url', 'pathname')])
def display_page(pathname):
    return dash_router(pathname)


def dash_router(url):
    children = render_cockpit(globalData)
    return children
Example 31
            '%02d' % xpos + '_' + '%02d' % ypos + '/'

        if os.path.exists(JSON_NAME):
            with open(JSON_NAME, 'rt') as infile:
                csi_dict = json.load(infile)
        else:
            csi_dict = {}

    if os.path.exists(plot_dir):
        shutil.rmtree(plot_dir)
    os.mkdir(plot_dir)

    dat_path = os.path.abspath(sys.argv[1])
    octave.addpath('/home/adrian/csi/linux-80211n-csitool-supplementary/matlab')
    # FAQ #2
    octave.eval("csi_trace = read_bf_file('" + dat_path + "');")
    pkts = octave.eval("rows(csi_trace);")
    print('Trace has', pkts, 'packets.')

    # overwrite is permitted
    csi_dict[str((xpos, ypos))] = []

    for index in range(1, int(pkts) + 1):  # Octave indexes from 1
        octave.eval("csi_entry = csi_trace{" + str(index) + "};")
        rssi_a, rssi_b, rssi_c = octave.eval("csi_entry.rssi_a;"), \
            octave.eval("csi_entry.rssi_b;"), octave.eval("csi_entry.rssi_c;")

        octave.eval("csi = get_scaled_csi(csi_entry);")
        octave.eval("save -6 " + MATDIR + "temp.mat csi;")

        mat_contents = sio.loadmat(MATDIR + 'temp.mat')['csi']
os.system('python3 ./scripts/generate_for_temperatures.py {N} {L} {velocity_modules} {small_radius} {small_mass} {big_radius} {big_mass}'.format(
		N = N,
		L = L,
		velocity_modules = str(max_velocity_modules).replace(']','').replace('[','').replace(' ',''),
		small_radius = small_radius,
		small_mass = small_mass,
		big_radius = big_radius,
		big_mass = big_mass
		));

for k in range(0, times):
	command = 'java -jar ./target/molecular-dynamics-simulation-1.0-SNAPSHOT.jar --dynamicFile=./data/Dynamic-N={N}-V={max_velocity_module}.txt --staticFile=./data/Static-N={N}.txt --time={limitTime} --boxSize={L}'.format(
						N = N,
						max_velocity_module = max_velocity_modules[k],
						limitTime = limitTime,
						L = L,
						)
	print(command)
	p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0)
	number = None;
	line = p.stdout.readlines() # Temperature line
	number = line[0].decode()
	number = number.split('\t')
	number = number[1]
	number = number.replace('\n', '')
	values[k] = float(number)
	print(values[k])
	func = 'bigParticleTrajectory(' + str(k) + ',' + str(values[k]) + ')';
	octave.eval(func)
Example 33
def create_data():
	octave.addpath('./matlab/')
	octave.eval("whiteNorm = powernoise(0,4096,'normalize')")
	octave.eval("whiteRand = powernoise(0,4096,'randpower')")
	octave.eval("pinkNorm = powernoise(1,4096,'normalize')")
	octave.eval("pinkRand = powernoise(1,4096,'randpower')")
	octave.eval("redNorm = powernoise(2,4096,'normalize')")
	octave.eval("redRand = powernoise(2,4096,'randpower')")

	octave.eval("s8 = pmodel(4096,0.52,-1.66)")
	octave.eval("s9 = pmodel(4096,0.62,-0.45)")
	octave.eval("s10 = pmodel(4096,0.72,-0.75)")

	# Saving data
	octave.eval("dlmwrite ('./data/s1_white_noise.csv',whiteNorm)")
	octave.eval("dlmwrite ('./data/s2_pink_noise.csv',pinkNorm)")
	octave.eval("dlmwrite ('./data/s3_red_noise.csv',redNorm)")

	octave.eval("dlmwrite ('./data/s8.csv',s8)")
	octave.eval("dlmwrite ('./data/s9.csv',s9)")
	octave.eval("dlmwrite ('./data/s10.csv',s10)")
	

	#octave.eval("dlmwrite ('./data/whiteRand.csv',whiteRand)")
	#octave.eval("dlmwrite ('./data/pinkRand.csv',pinkRand)")
	#octave.eval("dlmwrite ('./data/redRand.csv',redRand)")
Example 34
    if args.n_features_keep:
        weights = weights[..., sorted_inds_keep]

    # get the shape of the weights and make sure they satisfy certain conditions
    w_x, w_y, w_in, w_out = weights.shape

    # compute the new number of features you will have and make placeholder to store them
    nx = args.input_w - (w_x - 1)
    ny = args.input_h - (w_y - 1)
    w_out_new = (nx // args.stride_x) * (ny // args.stride_y) * w_out
    nonshared = np.zeros([args.input_w, args.input_h, w_in, w_out_new],
                         dtype=np.float64)

    # fill in the original features in the simple cell tensor
    count = 0
    for k in range(w_out):
        for i in range(0, nx, args.stride_x):
            for j in range(0, ny, args.stride_y):
                nonshared[i:i + w_x, j:j + w_y, :, count] = weights[:, :, :, k]
                count += 1

    # write the new features
    write_fpath = os.path.join(args.save_dir, os.path.split(fpath)[1])
    feat_data[0]['values'][0] = nonshared
    octave.push(['write_fpath', 'feat_data'], [write_fpath, feat_data])
    octave.eval('writepvpsharedweightfile(write_fpath, feat_data)')

logging.info('NONSHARED GRID SIZE IS {}x{}x{}.'.format(ny // args.stride_y,
                                                       nx // args.stride_x,
                                                       w_out))
Example 35
def load_hydopt_data(filepath):
	octave.eval("load('" + filepath + "', '-mat')") #todo: separate octave instanz für jeden user
	data = octave.pull('Data')
	return data
Example 36
        mod = sm.OLS(df["amp_['POz']_0.4-0.8"],
                     sm.add_constant(df[variable]),
                     hasconst=True).fit()

        bic_erps.loc[p, variable] = mod.bic
        bic_beta.loc[p, variable] = mod.params[variable]

bic_erps.to_csv(opj(outpath, 'bic_erps.csv'))

# Use octave to run the VBA-toolbox
octave.push('L', np.asarray(bic_erps.transpose()) * -1)
octave.addpath('/matlab/vbatoolbox')
octave.addpath('/matlab/vbatoolbox/core')
octave.addpath('/matlab/vbatoolbox/core/display')
octave.addpath('/matlab/vbatoolbox/utils')
octave.eval("options.DisplayWin = 0")
p, out = octave.eval("VBA_groupBMC(L, options)", nout=2)
# Save to plot
file = open(opj(outpath, 'erps_olsmean_VBAmodelcomp.pkl'), "wb")
pickle.dump(out, file)

# ########################################################################
# Mass univariate regression
###########################################################################

# Load model data
mod_data = pd.read_csv('/data/derivatives/task-fearcond_alldata.csv')

regvars = ['vhat', 'sa1hat', 'sa2hat']
regvarsnames = ['Expectation', 'Irr. uncertainty', 'Est. uncertainty']
Example 37
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
from oct2py import octave

# Load the Octave-Forge signal package.
octave.eval("pkg load signal")

print("Detect peaks without any filters.")
(_, indexes) = octave.findpeaks(
    np.array(vector), "DoubleSided", "MinPeakHeight", 0, "MinPeakDistance", 0, "MinPeakWidth", 0
)
# The results are in a 2D array of floats: get back to a 1D array and convert
# peak indexes to integers. Also, this is MATLAB-style indexing (one-based),
# so we must subtract one to get back to Python indexing (zero-based).
indexes = indexes[0].astype(int) - 1
print("Peaks are: %s" % (indexes))
plot_peaks(np.array(vector), indexes, algorithm="Octave-Forge findpeaks")

print("Detect peaks with minimum height and distance filters.")
(pks, indexes) = octave.findpeaks(
    np.array(vector), "DoubleSided", "MinPeakHeight", 6, "MinPeakDistance", 2, "MinPeakWidth", 0
)
# The results are in a 2D array of floats: get back to a 1D array and convert
# peak indexes to integers. Also, this is MATLAB-style indexing (one-based),
# so we must subtract one to get back to Python indexing (zero-based).
indexes = indexes[0].astype(int) - 1
print("Peaks are: %s" % (indexes))
plot_peaks(np.array(vector), indexes, mph=6, mpd=2, algorithm="Octave-Forge findpeaks")