class EnvironmentParamsContainer(task.Task):
    '''Keeps track of a bunch of environment params.

    Uses the internal luigi parameter mechanism. The nice thing is that we
    can instantiate this class and get an object with all the environment
    variables set. This is arguably a bit of a hack.
    '''

    local_scheduler = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Use local scheduling')
    scheduler_host = parameter.Parameter(
        is_global=True, default='localhost',
        description='Hostname of machine running remote scheduler',
        config_path=dict(section='core', name='default-scheduler-host'))
    scheduler_port = parameter.IntParameter(
        is_global=True, default=8082,
        description='Port of remote scheduler api process',
        config_path=dict(section='core', name='default-scheduler-port'))
    lock = parameter.BooleanParameter(
        is_global=True, default=False,
        description='(Deprecated, replaced by no_lock) '
                    'Do not run if similar process is already running')
    lock_size = parameter.IntParameter(
        is_global=True, default=1,
        description="Maximum number of workers running the same command")
    no_lock = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Ignore if similar process is already running')
    lock_pid_dir = parameter.Parameter(
        is_global=True, default='/var/tmp/luigi',
        description='Directory to store the pid file')
    workers = parameter.IntParameter(
        is_global=True, default=1,
        description='Maximum number of parallel tasks to run')
    logging_conf_file = parameter.Parameter(
        is_global=True, default=None,
        description='Configuration file for logging',
        config_path=dict(section='core', name='logging_conf_file'))
    module = parameter.Parameter(
        is_global=True, default=None,
        description='Used for dynamic loading of modules')  # see DynamicArgParseInterface

    @classmethod
    def env_params(cls, override_defaults=None):
        # Override any global parameter with whatever is in override_defaults
        # (default of None instead of {} avoids a shared mutable default)
        if override_defaults is None:
            override_defaults = {}
        for param_name, param_obj in cls.get_global_params():
            if param_name in override_defaults:
                param_obj.set_global(override_defaults[param_name])
        return cls()  # instantiate an object with the global params set on it
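# A minimal usage sketch (values are illustrative, not from the source): a
# command-line front end can push parsed arguments into the global parameters
# before the container is instantiated. Assumes the luigi-style `parameter`
# and `task` modules used above are importable.
env = EnvironmentParamsContainer.env_params({'workers': 4, 'local_scheduler': True})
assert env.workers == 4  # the instance reflects the overridden global value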
def _initialize(self):
    self.gamma = parameter.Parameter(shape=[self.num_features],
                                     initializer=parameter.ConstantInitializer(1.0))
    self.beta = parameter.Parameter(shape=[self.num_features],
                                    initializer=parameter.ConstantInitializer(0.0))
    self.running_mean = np.zeros([self.num_features], dtype='float32')
    self.running_var = np.zeros([self.num_features], dtype='float32')
    self.params['gamma'] = self.gamma
    self.params['beta'] = self.beta
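# For reference, these parameters enter the standard batch-norm transform
# (eps is a small stabilizing constant, not shown in this snippet):
#   y = gamma * (x - mean) / sqrt(var + eps) + beta
# where (mean, var) are per-batch statistics during training and the
# (running_mean, running_var) buffers above at inference time.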
def __init__(self, log, dir, sky, scn, nrealize=1, nobs=1, clcDet=1, specRes=1.e9):
    # Store passed parameters
    self.log = log
    self.dir = dir
    self.sky = sky
    self.scn = scn
    self.nrealize = nrealize
    self.nobs = nobs
    self.clcDet = clcDet
    self.specRes = specRes
    # Store global parameters
    self.configDir = self.dir + '/config'
    self.bandDir = self.configDir + '/Bands'
    self.name = dir.rstrip('/').split('/')[-1]
    self.log.log("Generating camera %s" % (self.name), 1)
    # Store camera parameters into a dictionary
    # (renamed from `dict` to avoid shadowing the builtin)
    paramArr, valArr = np.loadtxt(self.configDir + '/camera.txt', dtype=np.str,
                                  unpack=True, usecols=[0, 2], delimiter='|')
    paramDict = {paramArr[i].strip(): valArr[i].strip() for i in range(len(paramArr))}
    self.params = {
        'Boresight Elevation': self.__paramSamp(pr.Parameter(
            self.log, 'Boresight Elevation', paramDict['Boresight Elevation'],
            min=-40.0, max=40.0)),
        'Optical Coupling': self.__paramSamp(pr.Parameter(
            self.log, 'Optical Coupling', paramDict['Optical Coupling'],
            min=0.0, max=1.0)),
        'F Number': self.__paramSamp(pr.Parameter(
            self.log, 'F Number', paramDict['F Number'], min=0.0, max=np.inf)),
        'Bath Temp': self.__paramSamp(pr.Parameter(
            self.log, 'Bath Temp', paramDict['Bath Temp'], min=0.0, max=np.inf))}
    # Generate camera
    self.generate()
class EnvironmentParamsContainer(task.Task):
    '''Keeps track of a bunch of environment params.

    Uses the internal luigi parameter mechanism. The nice thing is that we
    can instantiate this class and get an object with all the environment
    variables set. This is arguably a bit of a hack.
    '''
    # TODO(erikbern): would be cleaner if we don't have to read config in global scope
    local_scheduler = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Use local scheduling')
    scheduler_host = parameter.Parameter(
        is_global=True,
        default=get_config().get('core', 'default-scheduler-host', default='localhost'),
        description='Hostname of machine running remote scheduler')
    scheduler_port = parameter.IntParameter(
        is_global=True, default=8082,
        description='Port of remote scheduler api process')
    lock = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Do not run if the task is already running')
    lock_pid_dir = parameter.Parameter(
        is_global=True, default='/var/tmp/luigi',
        description='Directory to store the pid file')
    workers = parameter.IntParameter(
        is_global=True, default=1,
        description='Maximum number of parallel tasks to run')

    @classmethod
    def env_params(cls, override_defaults):
        # Override any global parameter with whatever is in override_defaults
        for param_name, param_obj in cls.get_global_params():
            if param_name in override_defaults:
                param_obj.set_default(override_defaults[param_name])
        return cls()  # instantiate an object with the global params set on it
class EnvironmentParamsContainer(task.Task):
    '''Keeps track of a bunch of environment params.

    Uses the internal luigi parameter mechanism. The nice thing is that we
    can instantiate this class and get an object with all the environment
    variables set. This is arguably a bit of a hack.
    '''
    local_scheduler = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Use local scheduling')
    scheduler_host = parameter.Parameter(
        is_global=True, default=None,
        description='Hostname of machine running remote scheduler')
    scheduler_port = parameter.IntParameter(
        is_global=True, default=None,
        description='Port of remote scheduler api process')
    lock = parameter.BooleanParameter(
        is_global=True, default=True,
        description='(Deprecated, replaced by no_lock) '
                    'Do not run if similar process is already running')
    no_lock = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Ignore if similar process is already running')
    lock_pid_dir = parameter.Parameter(
        is_global=True, default='/var/tmp/luigi',
        description='Directory to store the pid file')
    workers = parameter.IntParameter(
        is_global=True, default=1,
        description='Maximum number of parallel tasks to run')
    logging_conf_file = parameter.Parameter(
        is_global=True, default=None,
        description='Configuration file for logging')

    @classmethod
    def apply_config_defaults(cls):
        cls.scheduler_host.set_default(configuration.get_config().get(
            'core', 'default-scheduler-host', 'localhost'))
        cls.scheduler_port.set_default(configuration.get_config().get(
            'core', 'default-scheduler-port', 8082))
        cls.logging_conf_file.set_default(configuration.get_config().get(
            'core', 'logging_conf_file', None))

    @classmethod
    def env_params(cls, override_defaults):
        cls.apply_config_defaults()
        # Override any global parameter with whatever is in override_defaults
        for param_name, param_obj in cls.get_global_params():
            if param_name in override_defaults:
                param_obj.set_default(override_defaults[param_name])
        return cls()  # instantiate an object with the global params set on it
def _initialize(self, with_grad=True):
    # Fall back to Gaussian weights and constant bias when no initializer is given
    if self.weight_initializer is None:
        self.weight_initializer = parameter.GaussianInitializer()
    if self.bias_initializer is None:
        self.bias_initializer = parameter.ConstantInitializer()
    self.weights = parameter.Parameter(shape=self._weights_shape,
                                       with_grad=with_grad,
                                       initializer=self.weight_initializer)
    self.bias = parameter.Parameter(shape=[self._weights_shape[-1]],
                                    with_grad=with_grad,
                                    initializer=self.bias_initializer)
    self.params['weights'] = self.weights
    self.params['bias'] = self.bias
def __init__(self):
    self.prmtr = parameter.Parameter()
    # Use context managers so the pickle file handles are closed promptly
    print('loading gene pickle')
    with open("F:\\Ensembl\\genes_mu_chr1_6.p", "rb") as f:
        self.GGraph = pickle.load(f)
    print('loading tree pickle')
    with open("F:\\Ensembl\\trie_chr1_6.p", "rb") as f:
        self.tree = pickle.load(f)
    print('pickle loaded')
def __init__(self):
    self.prmtr = parameter.Parameter()
    self.GGraph = graph.GGraph()
    self.GGraph.addNode(-1, 's', 0, 'source')
    self.chr = ''
    self.seq = ''
    self.Process()
class EnvironmentParamsContainer(task.Task):
    '''Keeps track of a bunch of environment params.

    Uses the internal luigi parameter mechanism. The nice thing is that we
    can instantiate this class and get an object with all the environment
    variables set. This is arguably a bit of a hack.
    '''
    # TODO(erikbern): would be cleaner if we don't have to read config in global scope
    local_scheduler = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Use local scheduling')
    scheduler_host = parameter.Parameter(
        is_global=True,
        default=get_config().get('core', 'default-scheduler-host', default='localhost'),
        description='Hostname of machine running remote scheduler')
    lock = parameter.BooleanParameter(
        is_global=True, default=False,
        description='Do not run if the task is already running')
    lock_pid_dir = parameter.Parameter(
        is_global=True, default='/var/tmp/luigi',
        description='Directory to store the pid file')
    workers = parameter.IntParameter(
        is_global=True, default=1,
        description='Maximum number of parallel tasks to run')
def __init__(self):
    self.found = 0
    self.notfound = 0
    self.multiple_results_cnt = defaultdict(int)
    self.prmtr = parameter.Parameter()
    # self.graph = gene.ConstructGraph()
    self.graph = gene.unpickleGraph()
    self.parseFASTQ()
class EnvironmentParamsContainer(task.ConfigWithoutSection):
    '''Keeps track of a bunch of environment params.

    Uses the internal luigi parameter mechanism. The nice thing is that we
    can instantiate this class and get an object with all the environment
    variables set. This is arguably a bit of a hack.
    '''
    local_scheduler = parameter.BoolParameter(
        default=False,
        description='Use local scheduling')
    scheduler_host = parameter.Parameter(
        default='localhost',
        description='Hostname of machine running remote scheduler',
        config_path=dict(section='core', name='default-scheduler-host'))
    scheduler_port = parameter.IntParameter(
        default=8082,
        description='Port of remote scheduler api process',
        config_path=dict(section='core', name='default-scheduler-port'))
    lock_size = parameter.IntParameter(
        default=1,
        description="Maximum number of workers running the same command")
    no_lock = parameter.BoolParameter(
        default=False,
        description='Ignore if similar process is already running')
    lock_pid_dir = parameter.Parameter(
        default=os.path.join(tempfile.gettempdir(), 'luigi'),
        description='Directory to store the pid file')
    workers = parameter.IntParameter(
        default=1,
        description='Maximum number of parallel tasks to run')
    logging_conf_file = parameter.Parameter(
        default=None,
        description='Configuration file for logging',
        config_path=dict(section='core', name='logging_conf_file'))
    module = parameter.Parameter(
        default=None,
        description='Used for dynamic loading of modules')  # see DynamicArgParseInterface
    parallel_scheduling = parameter.BoolParameter(
        default=False,
        description='Use multiprocessing to do scheduling in parallel.',
        config_path=dict(section='core', name='parallel-scheduling'))
def __init__(self):
    self.prmtr = parameter.Parameter()
    self.gff = 'F:\\Ensembl\\out_seq.txt'
    self.out_FASTQ = 'F:\\Ensembl\\out_seq.fq'
    self.line1_str = '@ZZZ'
    self.line3_str = '+ZZZ\n'
    self.line5_str = '???\n'
    self.cnt = 0
    self.Process()
def setParameter(self, name, ptype, val):
    """Set parameter `name` of type `ptype` to `val`, creating a new
    Parameter if necessary."""
    try:
        param = self.getElement(name)
    except errors.MissingXMLError:
        pass
    else:
        assert ptype == param.get('type')
        self.remove(param)
    self.append(parameter.Parameter(name, ptype, val))
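# A hypothetical round trip, assuming a container exposing the
# getElement/remove/append API used above; names and values are
# illustrative only. Setting the same name twice replaces the element in
# place, while changing the declared type trips the assert.
container.setParameter('threshold', 'float', 0.5)   # creates the Parameter
container.setParameter('threshold', 'float', 0.75)  # replaces it in place
# container.setParameter('threshold', 'int', 1)     # would raise AssertionError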
def __init__(self, nb_input, nb_output):
    """
    Initialize a linear layer

    Args:
        nb_input: length of the input
        nb_output: length of the output
    """
    self.nb_input = nb_input
    self.nb_output = nb_output
    self.params = parameter.Parameter((nb_output, nb_input))
    self.params.set_value(
        torch.empty(nb_output, nb_input).uniform_(-1 / math.sqrt(self.nb_input),
                                                  1 / math.sqrt(self.nb_input)))
    self.b = parameter.Parameter((1, nb_output))
    self.b.set_value(
        torch.empty(1, nb_output).uniform_(-1 / math.sqrt(self.nb_input),
                                           1 / math.sqrt(self.nb_input)))
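# A hedged usage sketch: the enclosing class name is not shown in this
# snippet, so `Linear` below is an assumption. Both tensors are drawn from
# U(-1/sqrt(nb_input), +1/sqrt(nb_input)), the classic fan-in scaling that
# keeps pre-activation magnitudes roughly constant regardless of layer width.
layer = Linear(nb_input=128, nb_output=64)
# layer.params holds a (64, 128) weight matrix; layer.b holds a (1, 64) bias row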
def __init__(self, parent, param_filename=None):
    Frame.__init__(self, parent)
    self.title = 'SpineDTI'
    self.log = os.path.join(os.path.abspath('.'), 'log')
    self.parent = parent
    self.parameter_values = {}
    self.param = parameter.Parameter()
    self.initUI()
    self.init_variable()
    self.update_param_from_text()
    if param_filename is not None:
        self.read_param(param_filename)
def __init__(self, fgndDict=None, nrealize=1):
    self.__ph = ph.Physics()
    # Sample the foreground parameters
    if fgndDict:
        def samp(param, pos=False, norm=False):
            if nrealize == 1:
                return param.getAvg()
            else:
                return param.sample(nsample=1, pos=pos, norm=norm)

        self.dustTemp = samp(pr.Parameter(fgndDict['Dust Temperature']), pos=True)
        self.dustSpecIndex = samp(pr.Parameter(fgndDict['Dust Spec Index']))
        self.dustAmp = samp(pr.Parameter(fgndDict['Dust Amplitude']), pos=True)
        self.dustFrequency = samp(pr.Parameter(fgndDict['Dust Scale Frequency']), pos=True)
        self.syncSpecIndex = samp(pr.Parameter(fgndDict['Synchrotron Spec Index']))
        self.syncAmp = samp(pr.Parameter(fgndDict['Synchrotron Amplitude']), pos=True)
    else:
        # Dust constants, taken from Planck
        self.dustTemp = 19.7  # [K]
        self.dustSpecIndex = 1.5
        self.dustAmp = 2.e-3
        self.dustFrequency = 353 * un.GHzToHz  # [Hz]
        # Synchrotron constants
        self.syncSpecIndex = -3.0
        self.syncAmp = 6.e3

    # Dust angular power spectrum constants, taken from Dunkley
    self.dustAngAmp = 8.e-12
    self.dustEll0 = 10.0
    self.dustNu0 = 90.0 * un.GHzToHz  # [Hz]
    self.dustM = -0.5
    # Synchrotron angular power spectrum constants, taken from Dunkley
    self.syncAngAmp = 4e-12
    self.syncEll0 = 10.0
    self.syncNu0 = 90.0 * un.GHzToHz  # [Hz]
    self.syncM = -0.6
        )  # makes sure node names are distinct
        assert (len(self.nodes) == len(set(self.nodes)))  # make sure nodes are distinct
        # Edges
        for n in range(len(self.nodes)):
            assert (self.QList[n][n] == 0)
        # Q0 = self.Q.copy()  # Q0 is for checking that off diagonal is positive
        # numpy.fill_diagonal(Q0, 0.)  # diagonal is negative so set to zero
        # assert(numpy.amin(Q0) == 0)  # now minimum element should be zero (on diagonal)
        # assert(self.Q.shape == (len(self.nodes), len(self.nodes)))
        self.reparameterize()


# This code sets up a canonical channel
# EK, ENa, EL are Hodgkin-Huxley values taken from http://icwww.epfl.ch/~gerstner/SPNM/node14.html
EK = parameter.Parameter("EK", -12 - 65, "mV", log=False)
ENa = parameter.Parameter("ENa", 115 - 65, "mV", log=False)
EL = parameter.Parameter("EL", 10.6 - 65, "mV", log=False)
# gNa, gK, gL are Hodgkin-Huxley values taken from the same source
gNa = parameter.Parameter("gNa", 120, "mS/cm^2", log=True)
gK = parameter.Parameter("gK", 36, "mS/cm^2", log=True)
gL = parameter.Parameter("gL", 0.3, "mS/cm^2", log=True)
# I = gV
# gmax_khh is from www.neuron.yale.edu, but is a density parameter inappropriate
# for a single channel; use g_open instead
gmax_khh = parameter.Parameter("gmax_khh", 0.02979, "microsiemens", log=True)
# "The single-channel conductance of typical ion channels ranges from 0.1 to
# 100 pS (picosiemens)."  Bertil Hille (2008), Scholarpedia, 3(10):6051.
# For now, g_open is used only for plotting
g_open = parameter.Parameter("g_open", 1., "picosiemens", log=True)
# gNa_open, gK_open from Adam Strassberg and Louis DeFelice, "Limitations of the
# Hodgkin-Huxley Formalism: Effects of Single Channel Kinetics on Transmembrane
# Voltage Dynamics", Neural Computation 5, 843-855 (1993), page 845
gK_open = parameter.Parameter("gK_open", 20., "picosiemens", log=True)
import channel
import numpy as np
import math
import random
import parameter
import scipy
import scipy.linalg
from parameter import u
import matplotlib
import matplotlib.pyplot as pyplot
import engine

# default_dt = parameter.Parameter("dt", 0.05, "ms", log=True)
default_dt = parameter.Parameter("dt", 0.01, "ms", log=True)
default_tstop = parameter.Parameter("tstop", 20., "ms", log=True)

preferred = parameter.preferredUnits()
preferred.time = 'ms'
preferred.voltage = 'mV'
preferred.conductance = 'pS'
preferred.current = "fA"


class StepProtocol(object):
    def __init__(self, patch, voltages, voltageStepDurations):
        self.thePatch = patch
        self.voltages = voltages
        self.voltageStepDurations = voltageStepDurations
        self.setSampleInterval(default_dt)
        self.preferred = preferred
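# A minimal instantiation sketch with hypothetical values: a -65 mV holding
# step followed by a +20 mV test step, each 10 ms long, wrapped in
# parameter.Parameter just as default_dt is above. The `patch` object is
# assumed to come from this repo's channel/engine machinery.
V0 = parameter.Parameter("V0", -65., "mV", log=False)
V1 = parameter.Parameter("V1", 20., "mV", log=False)
D = parameter.Parameter("D", 10., "ms", log=True)
protocol = StepProtocol(patch, [V0, V1], [D, D])  # `patch` assumed defined elsewhere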
def __init__(self, log, dir, fgndDict=None, nrealize=1, nobs=1, clcDet=1,
             elv=None, pwv=None, specRes=1.e9, foregrounds=False):
    self.log = log
    self.dir = dir
    self.configDir = dir + 'config/'
    self.name = dir.rstrip('/').split('/')[-1]

    # Store the program parameters
    # (renamed from `dict` to avoid shadowing the builtin)
    params, vals = np.loadtxt(self.configDir + 'program.txt', unpack=True,
                              usecols=[0, 2], dtype=np.str, delimiter='|')
    paramDict = {params[i].strip(): vals[i].strip() for i in range(len(params))}
    self.log.log('Using program parameter file %s' % (self.configDir + 'program.txt'), 1)

    # Sample the program parameters
    def samp(param, pos=False, norm=False):
        if nrealize == 1:
            return param.getAvg()
        else:
            return param.sample(nsample=1, pos=pos, norm=norm)

    self.tobs = samp(pr.Parameter(paramDict['Observation Time'], un.yrToSec), pos=True)
    self.fsky = samp(pr.Parameter(paramDict['Sky Fraction']), pos=True, norm=True)
    self.obsEff = samp(pr.Parameter(paramDict['Observation Efficiency']), pos=True, norm=True)
    self.netMgn = samp(pr.Parameter(paramDict['NET Margin']), pos=True)

    # Store sky object
    atmFile = sorted(gb.glob(self.configDir + '/atm*.txt'))
    if len(atmFile) == 0:
        atmFile = None
        self.log.log("No custom atmosphere provided; using Atacama MERRA AM-simulated sky", 1)
    elif len(atmFile) > 1:
        atmFile = None
        self.log.log('More than one atm file found in %s; ignoring them all' % (self.configDir), 2)
    else:
        atmFile = atmFile[0]
        self.log.log("Using custom atmosphere defined in %s" % (atmFile), 2)
    self.sky = sk.Sky(self.log, nrealize=1, fgndDict=fgndDict, atmFile=atmFile,
                      pwv=pwv, generate=False, foregrounds=foregrounds)

    # Store scan strategy object
    scanFile = sorted(gb.glob(self.configDir + '/elevation.txt'))
    if len(scanFile) == 0:
        scanDict = None
        self.log.log("No scan strategy provided; using default elevation distribution", 1)
    elif len(scanFile) > 1:
        scanDict = None
        self.log.log('More than one scan strategy file found in %s; ignoring them all' % (self.configDir), 2)
    else:
        scanFile = scanFile[0]
        params, vals = np.loadtxt(scanFile, unpack=True, usecols=[0, 1],
                                  dtype=np.str, delimiter='|')
        scanDict = {params[i].strip(): vals[i].strip() for i in range(2, len(params))}
        self.log.log("Using scan strategy defined in %s" % (scanFile), 2)
    self.scn = sc.ScanStrategy(self.log, scanDict=scanDict, elv=elv)

    # Store camera objects
    # (loop variable renamed so it doesn't shadow the `dir` argument)
    cameraDirs = sorted(gb.glob(dir + '/*/'))
    cameraDirs = [x for x in cameraDirs if 'config' not in x]
    self.cameras = [cm.Camera(self.log, camDir, self.sky, self.scn,
                              nrealize=nrealize, nobs=nobs, clcDet=clcDet,
                              specRes=specRes)
                    for camDir in cameraDirs]
def __init__(self, log, dir, fgndDict=None, nrealize=1, nobs=1, clcDet=1,
             specRes=1.e9, foregrounds=False):
    # Store passed parameters
    self.log = log
    self.dir = dir
    self.fgndDict = fgndDict
    self.nrealize = nrealize
    self.nobs = nobs
    self.clcDet = clcDet
    self.specRes = specRes
    self.fgnds = foregrounds

    # Store global parameters
    self.configDir = os.path.join(self.dir, 'config')
    self.name = os.path.split(self.dir)[-1]
    self.log.log("Instantiating telescope %s" % (self.name), 1)

    # Store the telescope parameters in a dictionary
    # (renamed from `dict` to avoid shadowing the builtin)
    paramArr, valArr = np.loadtxt(os.path.join(self.configDir, 'telescope.txt'),
                                  unpack=True, usecols=[0, 2], dtype=np.str, delimiter='|')
    paramDict = {paramArr[i].strip(): valArr[i].strip() for i in range(len(paramArr))}
    self.params = {
        'Site': self.__paramSamp(pr.Parameter(self.log, 'Site', paramDict['Site'])),
        'Elevation': self.__paramSamp(pr.Parameter(
            self.log, 'Elevation', paramDict['Elevation'], min=20., max=90.)),
        'PWV': self.__paramSamp(pr.Parameter(
            self.log, 'PWV', paramDict['PWV'], min=0.0, max=8.0)),
        'Observation Time': self.__paramSamp(pr.Parameter(
            self.log, 'Observation Time', paramDict['Observation Time'],
            un.yrToSec, min=0.0, max=np.inf)),
        'Sky Fraction': self.__paramSamp(pr.Parameter(
            self.log, 'Sky Fraction', paramDict['Sky Fraction'], min=0.0, max=1.0)),
        'Observation Efficiency': self.__paramSamp(pr.Parameter(
            self.log, 'Observation Efficiency', paramDict['Observation Efficiency'],
            min=0.0, max=1.0)),
        'NET Margin': self.__paramSamp(pr.Parameter(
            self.log, 'NET Margin', paramDict['NET Margin'], min=0.0, max=np.inf))}

    # Generate the telescope
    self.generate()
symmetry = False
n_mels = 40  # number of mel bands to use
power = 2    # exponent for the mel spectrogram, 1 = energy, 2 = power
mfccs = 20   # number of MFCC features to extract

# possible values: "MFCCS", "Spektrum", "Melspektrum"; case doesn't matter
# featureArt = ["Spektrum", "Melspektrum", "MFCCS"]
feature_type = "spektrum"

# Paths
input_path = '/home/schoeffler/PycharmProjects/spectrogram_1/Audios'
output_path = '/home/schoeffler/PycharmProjects/DatenOrdnerTEST'

# Create the object that bundles the extraction parameters
extraction_Parameter = parameter.Parameter(n_fft=n_fft, win_length=win_length,
                                           hop_length=hop_length, window=window,
                                           symmetry=symmetry, n_mels=n_mels,
                                           power=power, mfccs=mfccs)


def main():
    Extraction_functions.extract(input_path, output_path, extraction_Parameter, feature_type)


if __name__ == '__main__':
    main()
def run(self):
    if self.lst_dwi.size() == 0:
        return
    dir_out = os.path.abspath(self.txt_output.get())
    os.chdir(dir_out)

    # copy Nifti1 files
    filenames_b0 = self.lst_dwi.get_b0_filenames()
    for a_pair in filenames_b0:
        # test # frames in b0 image
        img = nib.load(a_pair[0])
        if len(img.shape) < 4 or img.shape[3] > 1:
            # extract first frame
            a_pair[1] = os.path.join(dir_out, a_pair[1])
            if os.path.lexists(a_pair[1]):
                os.remove(a_pair[1])
            cmd = 'fslroi %s %s 0 1' % (a_pair[0], a_pair[1])
            run_command(cmd)
        elif COPY_AS_SYMBOLIC_LINK:
            rel_path = abspath_to_relpath(dir_out, os.path.abspath(os.path.dirname(a_pair[0])))
            if os.path.lexists(a_pair[1]):
                os.remove(a_pair[1])
            os.symlink(os.path.join(rel_path, os.path.basename(a_pair[0])), a_pair[1])
            a_pair[1] = os.path.join(dir_out, a_pair[1])
        else:
            a_pair[1] = os.path.join(dir_out, a_pair[1])
            shutil.copy(*a_pair)
    # print(filenames_b0)

    filenames_dw = self.lst_dwi.get_dw_filenames()
    for a_pair in filenames_dw:
        if COPY_AS_SYMBOLIC_LINK:
            rel_path = abspath_to_relpath(dir_out, os.path.abspath(os.path.dirname(a_pair[0])))
            if os.path.lexists(a_pair[1]):
                os.remove(a_pair[1])
            os.symlink(os.path.join(rel_path, os.path.basename(a_pair[0])), a_pair[1])
            a_pair[1] = os.path.join(dir_out, a_pair[1])
        else:
            a_pair[1] = os.path.join(dir_out, a_pair[1])
            shutil.copy(*a_pair)
    # print(filenames_dw)

    fn_b0s = [a_pair[1] for a_pair in filenames_b0]
    # skip .bval/.bvec companion files when collecting diffusion-weighted images
    fn_dws = [a_pair[1] for a_pair in filenames_dw if extname(a_pair[1])[-5:-2] != '.bv']
    print(fn_dws)
    dir_b0s = self.lst_dwi.get_b0_directions()
    dir_dws = self.lst_dwi.get_dw_directions()

    # merge Nifti1, bval, bvec files
    fn_out_b0 = self.txt_output_b0.get()
    if fn_out_b0 == '':
        fn_out_b0 = self.lst_dwi.fn_b0(self.txt_output_b0)
    fn_out_dw = self.txt_output_dw.get()
    if fn_out_dw == '':
        fn_out_dw = self.lst_dwi.fn_dw(self.txt_output_dw)
    create_merge(os.path.join(dir_out, fn_out_b0), fn_b0s, verbose=True)
    create_merge(os.path.join(dir_out, fn_out_dw), fn_dws, is_dwi=True, verbose=True)

    # create acqparams.txt, index.txt
    create_acqparams(os.path.join(dir_out, '%sacqparams.txt' % self.prefix()),
                     fn_b0s, direction=dir_b0s)
    create_index(os.path.join(dir_out, '%sindex.txt' % self.prefix()),
                 fn_b0s, fn_dws, dir_b0s, dir_dws)

    fn_dwi_filenames = os.path.join(dir_out, '%sDWI.txt' % self.prefix())
    self.save_dwi_filenames(fn_dwi_filenames)
    print('Done')

    if self.make_param:
        fn_param = os.path.join(dir_out, '%sparams' % self.prefix())
        param = parameter.Parameter()
        if os.path.isfile(fn_param):
            param.read(fn_param)
        param.fn_b0 = os.path.join(dir_out, fn_out_b0)
        param.fn_dwi = os.path.join(dir_out, fn_out_dw)
        param.subject = self.subject()
        param.working_dir = dir_out
        param.save(fn_param)
        print('Saved : %s' % fn_param)

    if self.return_value is not None:
        self.return_value['subject'] = self.subject()
        self.return_value['b0'] = fn_out_b0
        self.return_value['dwi'] = fn_out_dw
        self.return_value['output_directory'] = dir_out
    if self.obj_return_value is not None:
        self.obj_return_value.update_parameter()
        self.parent.destroy()
        return
    return self.subject(), dir_out, fn_out_b0, fn_out_dw
def __init__(self, log, dir, sky, scn, nrealize=1, nobs=1, clcDet=1, specRes=1.e9):
    self.log = log
    self.sky = sky
    self.scn = scn
    self.dir = dir
    self.configDir = self.dir + '/config'
    self.bandDir = self.configDir + '/Bands'
    self.name = dir.rstrip('/').split('/')[-1]

    # Store the camera parameters
    # (renamed from `dict` to avoid shadowing the builtin)
    paramArr, valArr = np.loadtxt(self.configDir + '/camera.txt', dtype=np.str,
                                  unpack=True, usecols=[0, 2], delimiter='|')
    paramDict = {paramArr[i].strip(): valArr[i].strip() for i in range(len(paramArr))}

    # Sample the camera parameters
    def samp(param, pos=False, norm=False):
        if nrealize == 1:
            return param.getAvg()
        else:
            return param.sample(nsample=1, pos=pos, norm=norm)

    self.opticalCouplingToFP = samp(pr.Parameter(paramDict['Optical Coupling']),
                                    pos=True, norm=True)
    self.fnumber = samp(pr.Parameter(paramDict['F Number']), pos=True)
    self.Tb = samp(pr.Parameter(paramDict['Bath Temp']), pos=True)

    # Collect band files
    def bandDict(dir):
        bandFiles = sorted(gb.glob(dir + '/*'))
        if len(bandFiles):
            nameArr = [nm.split('/')[-1].split('.')[0] for nm in bandFiles if "~" not in nm]
            if len(nameArr):
                return {nameArr[i]: bandFiles[i] for i in range(len(nameArr))}
            else:
                return None
        else:
            return None

    # Store optical chain object
    self.optBandDict = bandDict(self.bandDir + '/Optics')
    self.optChain = oc.OpticalChain(self.log, self.configDir + '/optics.txt',
                                    nrealize=nrealize, optBands=self.optBandDict)

    # Store channel objects
    self.detBandDict = bandDict(self.bandDir + '/Detectors')
    chans = np.loadtxt(self.configDir + '/channels.txt', dtype=np.str, delimiter='|')
    keyArr = chans[0]
    elemArr = chans[1:]
    self.chanDicts = [{keyArr[i].strip(): elem[i].strip() for i in range(len(keyArr))}
                      for elem in elemArr]
    self.channels = [ch.Channel(log, chDict, self, self.optChain, self.sky, self.scn,
                                detBandDict=self.detBandDict, nrealize=nrealize,
                                nobs=nobs, clcDet=clcDet, specRes=specRes)
                     for chDict in self.chanDicts]

    # Store pixel dictionary
    self.pixels = {}
    for channel in self.channels:
        if channel.pixelID in self.pixels.keys():
            self.pixels[channel.pixelID].append(channel)
        else:
            self.pixels[channel.pixelID] = [channel]
def __init__(self, log, paramDict, nrealize=1, bandFile=None):
    # `paramDict` was originally named `dict`, which shadows the builtin
    self.__ph = ph.Physics()
    self.log = log
    self.bandFile = bandFile
    self.nrealize = nrealize
    # Store optic parameters
    self.element = paramDict['Element']
    self.temper = pr.Parameter(paramDict['Temperature'])
    self.absorb = pr.Parameter(paramDict['Absorption'])
    self.refl = pr.Parameter(paramDict['Reflection'])
    self.thick = pr.Parameter(paramDict['Thickness'], un.mmToM)
    self.index = pr.Parameter(paramDict['Index'])
    self.lossTan = pr.Parameter(paramDict['Loss Tangent'], 1.e-04)
    self.conductivity = pr.Parameter(paramDict['Conductivity'], 1.e+06)
    self.surfaceRough = pr.Parameter(paramDict['Surface Rough'], un.umToM)
    self.spill = pr.Parameter(paramDict['Spillover'])
    self.spillTemp = pr.Parameter(paramDict['Spillover Temp'])
    self.scattFrac = pr.Parameter(paramDict['Scatter Frac'])
    self.scattTemp = pr.Parameter(paramDict['Scatter Temp'])
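# A hedged construction sketch: the constructor expects strings as parsed from
# an optics table (see the camera snippets above). The class name `Optic`, the
# key values, and the `log` object are illustrative assumptions only.
opticDict = {'Element': 'Lens', 'Temperature': '4.0', 'Absorption': '0.01',
             'Reflection': '0.005', 'Thickness': '5.0', 'Index': '1.5',
             'Loss Tangent': '1.0', 'Conductivity': 'NA', 'Surface Rough': 'NA',
             'Spillover': '0.01', 'Spillover Temp': '10.0',
             'Scatter Frac': '0.01', 'Scatter Temp': '10.0'}
optic = Optic(log, opticDict)  # assuming this __init__ belongs to a class named Optic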
def main():
    p = parameter.Parameter()
    datasets_save_dir = p.datasets_path
    model_save_dir = p.model_path
    split = p.datasets_split
    batch_size = p.batch_size
    learning_late = p.learning_late
    num_layer = p.num_layer
    epochs = p.epochs

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print("CUDA is available:", torch.cuda.is_available())

    if not os.path.exists(model_save_dir):
        os.mkdir(model_save_dir)
    now = datetime.datetime.now()
    model_path = model_save_dir + "/model_layer" + str(num_layer) + "_" + \
        now.strftime('%Y%m%d_%H%M%S') + ".pt"

    _datasets_path = glob.glob(datasets_save_dir + "/*.npz")
    speech_list = []
    addnoise_list = []
    print("load npz data and transform it into stacked tensors...")
    for file in tqdm(_datasets_path):
        d = np.load(file)
        speech = torch.from_numpy(d["speech"].astype(np.float32)).clone()
        addnoise = torch.from_numpy(d["addnoise"].astype(np.float32)).clone()
        speech_list.append(speech)
        addnoise_list.append(addnoise)

    num_data = len(speech_list)
    # Round the usable dataset size down to a multiple of 100. The original
    # snippet had no else-branch, leaving num_usedata undefined whenever
    # round() rounded down; the else assignment fixes that.
    a = round(num_data, -2)
    if a > num_data:
        num_usedata = round(num_data - 100, -2)
    else:
        num_usedata = a
    tensor_speech = torch.stack(speech_list[:num_usedata])
    tensor_addnoise = torch.stack(addnoise_list[:num_usedata])
    print("Available data :", num_data)
    print("Use data :", num_usedata)

    mydataset = utils.TensorDataset(tensor_speech, tensor_addnoise)
    data_num = tensor_speech.shape[0]
    data_split = [int(data_num * split[0]), int(data_num * split[1]),
                  int(data_num * split[2])]
    test_dataset, val_dataset, train_dataset = utils.random_split(mydataset, data_split)
    train_loader = utils.DataLoader(train_dataset, batch_size=batch_size,
                                    num_workers=os.cpu_count(), pin_memory=True, shuffle=True)
    val_loader = utils.DataLoader(val_dataset, batch_size=batch_size,
                                  num_workers=os.cpu_count(), pin_memory=True, shuffle=True)
    test_loader = utils.DataLoader(test_dataset, batch_size=batch_size,
                                   num_workers=os.cpu_count(), pin_memory=True, shuffle=True)

    # model
    feat = tensor_addnoise.shape[1]
    sequence = tensor_addnoise.shape[2]
    model = mm.Net(sequence, feat, num_layer).to(device)

    # loss/optimizer
    criterion = nn.L1Loss().to(device)
    # criterion = nn.MSELoss().to(device)
    optimizer = optim.Adam(model.parameters(), lr=learning_late)

    print("#####################################################################")
    print(" Start Training..")
    print("#####################################################################")

    train_loss_list = []
    test_loss_list = []
    for epoch in tqdm(range(1, epochs + 1), desc='[Training..]'):
        # Training
        model.train()  # switch to training mode
        train_loss = 0
        for batch_idx, (speech, addnoise) in enumerate(train_loader):
            # fetch a batch
            speech, addnoise = speech.to(device), addnoise.to(device)
            optimizer.zero_grad()
            # forward pass
            mask = model(addnoise)  # the model estimates the mask itself
            h_hat = mask * addnoise  # apply the mask to the noisy speech to enhance the desired speech
            # compute the loss and backpropagate
            loss = criterion(h_hat, speech)  # enhanced speech vs. label
            loss.backward()
            optimizer.step()
            train_loss += loss.item()
        train_loss /= len(train_loader.dataset)
        train_loss_list.append(train_loss)

        # Eval
        model.eval()
        test_loss = 0
        with torch.no_grad():
            for speech, addnoise in val_loader:
                speech, addnoise = speech.to(device), addnoise.to(device)
                mask = model(addnoise)
                h_hat = mask * addnoise
                test_loss += criterion(h_hat, speech).item()  # sum up batch loss
        # normalize by the validation set actually iterated above
        # (the original divided by len(test_loader.dataset))
        test_loss /= len(val_loader.dataset)
        test_loss_list.append(test_loss)
        tqdm.write('\nTrain set: Average loss: {:.6f}\nTest set: Average loss: {:.6f}'
                   .format(train_loss, test_loss))

        if epoch == 1:
            best_loss = test_loss
            torch.save(model.state_dict(), model_path)
        else:
            if best_loss > test_loss:
                torch.save(model.state_dict(), model_path)
                best_loss = test_loss
        if epoch % 10 == 0:  # periodic checkpoint every 10 epochs
            epoch_model_path = model_save_dir + "/model_layer" + str(num_layer) + "_" + \
                now.strftime('%Y%m%d_%H%M%S') + "_Epoch" + str(epoch) + ".pt"
            torch.save(model.state_dict(), epoch_model_path)
import parameter
import NMF
import time
import numpy as np


def result(data, w, h):
    print("UserMatrix=")
    print(w)
    print("ItemMatrix=")
    print(h)
    print("RateMatrix=")
    print(np.matrix(data.RateMatrix))
    print("ReMatrix=")
    print(np.matrix(w * h))


def run(data):
    data = parameter.Parameter()  # note: the passed-in `data` is replaced by a fresh Parameter
    data.get_sample()
    w, h = NMF.factorize(data, 1000)
    result(data, w, h)


if __name__ == '__main__':
    start = time.time()
    data = parameter.Parameter()
    run(data)
    elapse_time = time.time() - start
    print("RunningTime:" + str(elapse_time))
"""
Created on Fri Dec 18 17:31:07 2020

@author: t.yamamoto
"""

import numpy as np
import os
import random

from tqdm import tqdm
from librosa.core import load, stft

import utils as ut
import parameter

p = parameter.Parameter()

audio_len = p.audio_len
sample_rate = p.sample_rate
clean_speech_dir = p.target_path
noise_dir = p.noise_path
datasets_save_dir = p.datasets_path
fft_size = p.fft_size
hop_length = p.hop_length


def length_fitting(data, audio_len):
    if len(data) > audio_len:
        data = data[:audio_len]
    else:
win_length = 10   # window size for the FFT [milliseconds]
hop_length = 10   # hop length for the FFT [milliseconds]
window = "hann"   # type of window to use in the stft function
symmetry = False
frequency_Axis = "linear"  # frequency axis, "linear" or "log"
disp_ref = n_fft  # reference value for the spectrogram
n_mels = 40       # number of mel bands to use
power = 2         # exponent for the mel spectrogram, 1 = energy, 2 = power
mfccs = 20        # number of MFCC features to extract

# Parameters are fused in an object
displayParameter = parameter.Parameter(n_fft=n_fft, win_length=win_length,
                                       hop_length=hop_length, window=window,
                                       symmetry=symmetry, n_mels=n_mels,
                                       power=power, mfccs=mfccs,
                                       frequency_Axis=frequency_Axis,
                                       disp_Ref=disp_ref)

# Path to the audio file to be displayed
file = '/home/schoeffler/PycharmProjects/spectrogram_1/Audios/testfile.wav'

# possible values: "Audio", "Spektrum", "Melspektrum"; case doesn't matter
plots = ["audio", "db_spektrum", "spektrum", "melspektrum", "mfccs"]


def main():
    Displayfunctions.display(file, displayParameter, plots)
        pass

    def replot(self, XTrue=15, nReps=100, seed=None):
        self.sim(XTrue=XTrue, nReps=nReps, seed=seed)
        self.compute()
        self.plot()


class likefun2(object):  # Two-dimensional likelihood grid
    def __init__(self, parent, XParam, YParam):
        self.parent = parent
        self.XParam = XParam
        self.YParam = YParam


q0 = parameter.Parameter("q0", 0.5, "kHz", log=True)
q1 = parameter.Parameter("q1", 0.25, "kHz", log=True)
q = parameter.Parameter("q", 1. / 6., "kHz", log=True)
T3 = toyProtocol([q0, q1])
T2 = toyProtocol([q])
FT3 = T3.flatten(seed=3)
FT2 = T2.flatten(seed=3)
XRange = numpy.arange(0.1, 30.1, 1)
YRange = numpy.arange(0.11, 30.11, 1)  # Different values so rate constants remain unequal

# One-dimensional likelihood plot with the toy2 model
# plt.figure()
# LF2 = likefun1(T2, q)
# LF2.setRange(XRange)
# LF2.replot(XTrue=15., seed=10, nReps=100)