def __init__(self): parser = SafeConfigParser() parser.read('config/sclstm.cfg') self.hidden_dim = parser.getint('generator', 'hidden_dim') self.num_epochs = parser.getint('generator', 'num_epochs') self.sequence_length = parser.getint('generator', 'sequence_length') self.sequences_per_batch = parser.getint('train_mode', 'sequences_per_batch') self.learning_rate = parser.getfloat('learn', 'learning_rate') self.momentum_as_time_constant = parser.getint( 'learn', 'momentum_as_time_constant') self.clipping_threshold_per_sample = parser.getfloat( 'learn', 'clipping_threshold_per_sample') self.token_to_id_path = parser.get('data', 'token_to_id_path') self.slot_value_path = parser.get('data', 'slot_value_path') self.train_file_path = parser.get('data', 'train_file_path') self.sv_file_path = parser.get('data', 'sv_file_path') self.model_file_path = parser.get('data', 'model_file_path') self.test_file_path = parser.get('data', 'test_file_path') self.sv_test_file_path = parser.get('data', 'sv_test_file_path') self.segment_begin = parser.get('generator', 'segment_begin') self.segment_end = parser.get('generator', 'segment_end') self.num_samples_between_progress_report = parser.get( 'train_mode', 'num_samples_between_progress_report') self.patience = parser.getint('train_mode', 'patience') self.overgen = parser.getint('gen', 'overgen') self.beamwidth = parser.getint('gen', 'beamwidth') self.decode = parser.get('gen', 'decode')
def read_output(filename): """ Read data file generated in the output folder. Arguments --------- rc : string Data file name Returns ------- return : tuple Tuple of all parameters stored in the file """ config = SafeConfigParser() config.read(filename) order = config.getint('data', 'order') Nx = config.getint('data', 'Nx') Nt = config.getint('data', 'Nt') T0 = config.getfloat('data', 'T0') T = config.getfloat('data', 'T') L = [float(f) for f in config.get('data', 'L').split()] rc = config.getfloat('data', 'rc') qc = config.getfloat('data', 'qc') rho = config.getfloat('data', 'rho') mesh_locations = config.get('data', 'mesh_locations').split(',') names = config.get('data', 'names').split(',') locations = config.get('data', 'locations').split(',') return order, Nx, Nt, T0, T, L, rc, qc, rho, mesh_locations,\ names, locations
def loadParams(filepath): config = SafeConfigParser({ 'color': '0', 'PreTrainNet': '', 'loopRestartFrequency': '-1' }) config.read(filepath) params = {} #device params['randomSeed'] = config.getint('device', 'randomSeed') #solver params['SolverType'] = config.get('solver', 'SolverType') params['lr'] = config.getfloat('solver', 'lr') params['momentum'] = config.getfloat('solver', 'momentum') params['lrDecay'] = config.getfloat('solver', 'lrDecay') params['batchSize'] = config.getint('solver', 'batchSize') params['weightDecay'] = config.getfloat('solver', 'weightDecay') #stopping crteria params['nMaxEpoch'] = config.getint('stopping', 'nMaxEpoch') params['nMaxIter'] = config.getint('stopping', 'nMaxIter') #the SA training would alternating between 'normalBatchLength' iteration of normal training and 'loopBatchLength' of self-augment training params['normalBatchLength'] = config.getint('loop', 'normalBatchLength') params['loopStartEpoch'] = config.getint('loop', 'loopStartEpoch') params['loopStartIteration'] = config.getint( 'loop', 'loopStartIteration') #add loop after this number of normal training. params['loopBatchLength'] = config.getint( 'loop', 'loopBatchLength' ) #how many mini-batch iteration for ever loop optimize params['loopRestartFrequency'] = config.getint('loop', 'loopRestartFrequency') #network structure params['NetworkType'] = config.get('network', 'NetworkType') params['Channal'] = config.get('network', 'Channal') params['BN'] = config.getboolean('network', 'BN') params['color'] = config.getboolean('network', 'color') params['PreTrainNet'] = config.get('network', 'PreTrainNet') #dataset params['NormalizeInput'] = config.getboolean('dataset', 'NormalizeInput') params['dataset'] = config.get('dataset', 'dataset') params['testDataset'] = config.get('dataset', 'testDataset') params['predictDataset'] = config.get('dataset', 'predictDataset') #display and testing params['displayStep'] = config.getint('display', 'displayStep') params['loopdisplayStep'] = config.getint('display', 'loopdisplayStep') params['checkPointStepIteration'] = config.getint( 'display', 'checkPointStepIteration') params['checkPointStepEpoch'] = config.getint('display', 'checkPointStepEpoch') return params
def __init__(self): cp = SafeConfigParser(defaults={ 'NX': 10001, 'XMAX': 10000, 'C0': 334, 'NT': 1001, 'DT': 0.001, 'Freq': 25, }) with open('ParFile') as f: try: cp.read_string('[global]\n' + f.read(), source='Par_file') except AttributeError: # Python 2 cp.readfp(FakeGlobalSectionHead(f)) self.nx = cp.getint('global','NX') self.xmax = cp.getint('global','XMAX') self.c0 = cp.getint('global','C0') self.nt = cp.getint('global','NT') self.dt = cp.getfloat('global','DT') self.gx = cp.getint('global','GX') self.freq = cp.getint('global','Freq') self.sx = cp.getint('global','SX') self.snap = cp.getint('global','SNAP')
def __init__(self,iniFile,expName,gridName,version,ClusterCosmology): Config = SafeConfigParser() Config.optionxform=str Config.read(iniFile) self.cc = ClusterCosmology bigDataDir = Config.get('general','bigDataDirectory') self.clttfile = Config.get('general','clttfile') self.constDict = dict_from_section(Config,'constants') self.clusterDict = dict_from_section(Config,'cluster_params') #version = Config.get('general','version') beam = list_from_config(Config,expName,'beams') noise = list_from_config(Config,expName,'noises') freq = list_from_config(Config,expName,'freqs') lknee = list_from_config(Config,expName,'lknee')[0] alpha = list_from_config(Config,expName,'alpha')[0] self.fsky = Config.getfloat(expName,'fsky') self.mgrid,self.zgrid,siggrid = pickle.load(open(bigDataDir+"szgrid_"+expName+"_"+gridName+ "_v" + version+".pkl",'rb')) #self.cc = ClusterCosmology(self.fparams,self.constDict,clTTFixFile=self.clttfile) self.SZProp = SZ_Cluster_Model(self.cc,self.clusterDict,rms_noises = noise,fwhms=beam,freqs=freq,lknee=lknee,alpha=alpha) self.HMF = Halo_MF(self.cc,self.mgrid,self.zgrid) self.HMF.sigN = siggrid.copy()
def __init__(self,config=None,opts=None): # not enough info to execute if config==None and opts==None: print("Please specify command option or config file ...") return # config parser parser = SafeConfigParser() parser.read(config) self.debug = parser.getboolean('knn','debug') self.seed = parser.getint('knn','random_seed') self.obj = 'dt' self.trainfile = parser.get('knn','train') self.validfile = parser.get('knn','valid') self.testfile = parser.get('knn','test') self.vocabfile = parser.get('knn','vocab') self.domain = parser.get('knn','domain') self.percentage = float(parser.getfloat('knn','percentage'))/100.0 # Setting generation specific parameters self.topk = parser.getint('knn','topk') self.detectpairs= parser.get('knn','detectpairs') self.verbose = parser.getint('knn','verbose') # set random seed np.random.seed(self.seed) random.seed(self.seed) np.set_printoptions(precision=4) # setting data reader, processors, and lexicon self.setupSideOperators()
def __init__(self, str_config, arch): config = SafeConfigParser() config.read(str_config) self.sections = config.sections() print(self.sections) if arch in self.sections: self.number_of_classes = config.getint(arch, "NUM_CLASSES") self.number_of_iterations = config.getint(arch, "NUM_ITERATIONS") self.dataset_size = config.getint(arch, "DATASET_SIZE") self.test_size = config.getint(arch, "TEST_SIZE") self.batch_size = config.getint(arch, "BATCH_SIZE") self.estimated_number_of_batches = int( float(self.dataset_size) / float(self.batch_size)) self.estimated_number_of_batches_test = int( float(self.test_size) / float(self.batch_size)) self.snapshot_time = config.getint(arch, "SNAPSHOT_TIME") self.test_time = config.getint(arch, "TEST_TIME") self.lr = config.getfloat(arch, "LEARNING_RATE") self.snapshot_prefix = config.get(arch, "SNAPSHOT_PREFIX") self.number_of_epochs = int( float(self.number_of_iterations) / float(self.estimated_number_of_batches)) self.data_dir = config.get(arch, "DATA_DIR") else: raise ValueError(" {} is not a valid section".format(arch))
def __init__(self, str_config, modelname): config = SafeConfigParser() config.read(str_config) self.sections = config.sections() if modelname in self.sections: try: self.modelname = modelname #self.arch = config.get(modelname, "ARCH") self.process_fun = 'default' if 'PROCESS_FUN' in config[modelname] is not None: self.process_fun = config[modelname]['PROCESS_FUN'] self.number_of_classes = config.getint(modelname, "NUM_CLASSES") self.number_of_epochs = config.getint(modelname, "NUM_EPOCHS") self.batch_size = config.getint(modelname, "BATCH_SIZE") self.snapshot_steps = config.getint(modelname, "SNAPSHOT_STEPS") #test time sets when test is run (in seconds) self.test_time = config.getint(modelname, "TEST_TIME") self.validation_steps = config.getint(modelname, "VALIDATION_STEPS") self.lr = config.getfloat(modelname, "LEARNING_RATE") #snapshot folder, where training data will be saved self.snapshot_prefix = config.get(modelname, "SNAPSHOT_DIR") self.data_dir = config.get(modelname, "DATA_DIR") self.channels = config.getint(modelname, "CHANNELS") assert (self.channels == 1 or self.channels == 3) except Exception: raise ValueError("something wrong with configuration file " + str_config) else: raise ValueError(" {} is not a valid section".format(modelname))
def parse_config(filepath, option): parser = SafeConfigParser() parser.read(filepath) # Loading optimizer arguments print('== ' + option + '.optim ==') optim_dict = {} section_name = option + '.optim' for key in parser.options(section_name): if key == 'optimizer': optim_name = parser.get(section_name, key) else: if key == 'nesterov' or key == 'amsgrad': value = parser.getboolean(section_name, key) else: value = parser.getfloat(section_name, key) optim_dict[key] = value optim_args = (optim_name, optim_dict) print('Optim name: {}'.format(optim_name)) for k, v in optim_dict.items(): print('{:<12} --> {}'.format(k, v)) # Loading model arguments print('== ' + option + '.model ==') int_default = dict(batch_size=32, save_period=20, patience=5, max_epochs=100) float_default = dict(k=1.0) str_default = dict(pretrain='') model_args = {} model_args.update(int_default) model_args.update(float_default) model_args.update(str_default) section_name = option + '.model' if parser.has_section(section_name): for key in parser.options(section_name): if key in int_default: value = parser.getint(section_name, key) elif key in float_default: value = parser.getfloat(section_name, key) else: value = parser.get(section_name, key) model_args[key] = value for k, v in model_args.items(): print('{:<12} --> {}'.format(k, v)) return optim_args, model_args
def initNet(self, config, opts=None): print('\n\ninit net from scrach ... ') # config parser parser = SafeConfigParser() parser.read(config) # setting learning hyperparameters self.debug = parser.getboolean('learn', 'debug') if self.debug: print('loading settings from config file ...') self.seed = parser.getint('learn', 'random_seed') self.lr_divide = parser.getint('learn', 'lr_divide') self.lr = parser.getfloat('learn', 'lr') self.lr_decay = parser.getfloat('learn', 'lr_decay') self.beta = parser.getfloat('learn', 'beta') self.min_impr = parser.getfloat('learn', 'min_impr') self.llogp = parser.getfloat('learn', 'llogp') # setting training mode self.mode = parser.get('train_mode', 'mode') self.obj = parser.get('train_mode', 'obj') self.gamma = parser.getfloat('train_mode', 'gamma') self.batch = parser.getint('train_mode', 'batch') # setting file paths if self.debug: print('loading file path from config file ...') self.wvecfile = parser.get('data', 'wvec') self.trainfile = parser.get('data', 'train') self.validfile = parser.get('data', 'valid') self.testfile = parser.get('data', 'test') self.vocabfile = parser.get('data', 'vocab') self.domain = parser.get('data', 'domain') self.percentage = float(parser.getfloat('data', 'percentage')) / 100.0 # Setting generation specific parameters self.topk = parser.getint('gen', 'topk') self.overgen = parser.getint('gen', 'overgen') self.beamwidth = parser.getint('gen', 'beamwidth') self.detectpairs = parser.get('gen', 'detectpairs') self.verbose = parser.getint('gen', 'verbose') self.decode = parser.get('gen', 'decode') # setting rnn configuration self.gentype = parser.get('generator', 'type') self.dh = parser.getint('generator', 'hidden') # set random seed np.random.seed(self.seed) random.seed(self.seed) np.set_printoptions(precision=4) # setting data reader, processors, and lexicon self.setupDelegates() # network size self.di = len(self.reader.vocab) # logp for validation set self.valid_logp = 0.0 # start setting networks self.initModel() self.model.config_theano()
def __init__(self): parser = SafeConfigParser() parser.read(CONFIG_FILE) self.classifier_pickle = parser.get(CONFIG_SECTION, "classifier_pickle") self.modelCNN = parser.get(CONFIG_SECTION, "model") self.embedding_file = parser.get(CONFIG_SECTION, "embedding_file") self.npy = parser.get(CONFIG_SECTION, "npy") self.outputframes = parser.get(CONFIG_SECTION, "outputframes") self.metadata = parser.get(CONFIG_SECTION, "metadata") self.gpu_memory_fraction = parser.getfloat(CONFIG_SECTION, "gpu_memory_fraction") self.margin = parser.getint(CONFIG_SECTION, "margin") self.frame_interval = parser.getint(CONFIG_SECTION, "frame_interval") self.image_size = parser.getint(CONFIG_SECTION, "image_size") self.input_image_size = parser.getint(CONFIG_SECTION, "input_image_size") self.batch_size = parser.getint(CONFIG_SECTION, "batch_size") self.minsize = parser.getint(CONFIG_SECTION, "minsize") self.factor = parser.getfloat(CONFIG_SECTION, "factor")
def fromConfigFile(filename): print('loading config from file: %s' % filename) config = SafeConfigParser() config.read(filename) ret = ServerConfig() ret.server_host = config.get('server', 'host') ret.server_port = config.getint('server', 'port') ret.whole_world_coords = [ config.getfloat('server', 'min_lon'), config.getfloat('server', 'min_lat'), config.getfloat('server', 'max_lon'), config.getfloat('server', 'max_lat') ] ret.db_host = config.get('db', 'host') ret.db_port = config.getint('db', 'port') ret.db_name = config.get('db', 'name') ret.verbose = config.get('debug', 'verbose') return ret
def __init__(self, config_path): # Read configuration file cfg = SafeConfigParser() cfg.read(config_path) # Case self.v_inf = cfg.getfloat('case', 'v_inf') self.rpm = cfg.getfloat('case', 'rpm') if cfg.has_option('case', 'twist'): self.twist = cfg.getfloat('case', 'twist') else: self.twist = 0.0 if cfg.has_option('case', 'coaxial'): self.coaxial = cfg.getboolean('case', 'coaxial') else: self.coaxial = False # Rotor if cfg.has_section('turbine'): self.mode = 'turbine' self.rotor = Rotor(cfg, 'turbine', self.mode) else: self.mode = 'rotor' self.rotor = Rotor(cfg, 'rotor', self.mode) # Fluid self.fluid = Fluid(cfg) # Output self.T = 0 # Thrust self.Q = 0 # Torque self.P = 0 # Power # Coaxial if self.coaxial: self.rpm2 = cfg.getfloat('case', 'rpm2') if cfg.has_option('case', 'twist2'): self.twist2 = cfg.getfloat('case', 'twist2') else: self.twist2 = 0.0 self.rotor2 = Rotor(cfg, 'rotor2', self.mode) self.zD = cfg.getfloat('case', 'dz') / self.rotor.diameter self.T2 = 0 self.Q2 = 0 self.P2 = 0 # Solver self.solver = 'bisect' self.Cs = 0.625 if cfg.has_section('solver'): self.solver = cfg.get('solver', 'solver') if cfg.has_option('solver', 'Cs'): self.Cs = cfg.getfloat('solver', 'Cs')
def extract_ssi(control_fname): """Extract SSI from a PyTOPKAPI simulation file. Read a PyTOPKAPI simulation file and it's associated parameter file and compute the Soil Saturation Index (SSI) for each model cell and timestep. The results are returned as a Numpy array. Parameters ---------- control_fname : string The file name of a PyTOPKAPI simulation control file. The name should contain the full path relative to the current directory. Returns ------- ssi : Numpy ndarray A Numpy array containing the calculated SSI values. """ config = SafeConfigParser() config.read(control_fname) global_param_fname = config.get('input_files', 'file_global_param') param_fname = config.get('input_files', 'file_cell_param') sim_fname = config.get('output_files', 'file_out') fac_L = config.getfloat('calib_params', 'fac_L') params = np.loadtxt(param_fname) glob_params = np.genfromtxt(global_param_fname, names=True) soil_depth = fac_L*params[:, 8] factor = params[:, 11] - params[:, 10] cell_area = glob_params['X']**2 # m^2 soil_depth = ma.masked_values(soil_depth, 0.0) factor = ma.array(factor, mask=soil_depth.mask) div = factor*soil_depth*cell_area tkpi_file = h5py.File(sim_fname, 'r') soil_vol = tkpi_file['/Soil/V_s'][...] tkpi_file.close() # ssi = (Vs/cell_vol)*100 # cell_vol = (theta_s - theta_r)*soil_depth*cell_area sv = ma.array(soil_vol, mask=soil_depth.mask) ssi = (sv/(div))*100.0 return ssi
def loadConfig(fileName="config.ini"): config = SafeConfigParser() config.read(fileName) info = {} try: info["botName"] = config.get('main', "botName") info["apiFileName"] = config.get('main', "apiFileName") info["pair"] = config.get('main', 'pair') info["days"] = config.getint('main', 'days') info["dipAmount"] = config.getfloat('main', 'dipAmount') info["amountToBuy"] = config.getfloat('main', 'amountToBuy') info["pollingDelay"] = config.getint('main', 'pollingDelay') info["debugOnConsole"] = config.getboolean('main', 'debugOnConsole') info["notifications"] = config.getboolean('notifications', 'notifications') if info["notifications"]: info["botId"] = config.get('notifications', 'botId') info["chatId"] = config.get('notifications', 'chatId') except Exception as exc: logging.critical("Exception " + str(exc.__class__.__name__) + " : " + str(exc)) return info
class Config(object): def __init__(self): self._configFileName = "rover.cfg" self._readConfig() def _readConfig(self): self._config = SafeConfigParser({ "db_host": "localhost", "db_port": "27017", "db_name": "rover", "serial_path": "/dev/ttymxc3", "serial_baud": "115200", "serial_timeout": "0.5", "serial_encoding": "windows-1252"}) self._config.read(self._configFileName) self._validateConfig() def _writeConfig(self): self._config.write(open(self._configFileName, "w")) def _validateConfig(self): changed = False if not self._config.has_section("db"): self._config.add_section("db") changed = True if not self._config.has_section("arduino_serial"): self._config.add_section("arduino_serial") changed = True if changed: self._writeConfig() def get(self, section, key): return self._config.get(section, key) def getInt(self, section, key): return self._config.getint(section, key) def getFloat(self, section, key): return self._config.getfloat(section, key) def getBoolean(self, section, key): return self._config.getboolean(section, key) def set(self, section, key, value): self._config.set(section, key, value) self._writeConfig()
def read_header(fname): with open(fname) as f: # setup config reader assert f.readline().strip() == \ 'Brain Vision Data Exchange Header File Version 1.0' lines = itertools.takewhile(lambda x: '[Comment]' not in x, f.readlines()) cfg = SafeConfigParser() cfg.readfp(StringIO(''.join(lines))) # get sampling info sample_rate = 1e6 / cfg.getfloat('Common Infos', 'SamplingInterval') nchann = cfg.getint('Common Infos', 'NumberOfChannels') log.info('Found sample rate of %.2f Hz, %d channels.' % (sample_rate, nchann)) # check binary format # assert cfg.get('Common Infos', 'DataOrientation') == 'MULTIPLEXED' # assert cfg.get('Common Infos', 'DataFormat') == 'BINARY' # assert cfg.get('Binary Infos', 'BinaryFormat') == 'INT_16' # TODO add eeg data type switch according to header data; export to read_eeg # load channel labels chan_lab = ['UNKNOWN'] * nchann chan_resolution = np.ones(nchann) * np.nan for (chan, props) in cfg.items('Channel Infos'): n = int(re.match(r'ch(\d+)', chan).group(1)) name, refname, resolution, unit = props.split(',')[:4] chan_lab[n - 1] = name chan_resolution[n - 1] = resolution # locate EEG and marker files eeg_fname = cfg.get('Common Infos', 'DataFile') marker_fname = cfg.get('Common Infos', 'MarkerFile') return dict(sample_rate=sample_rate, chan_lab=chan_lab, chan_resolution=chan_resolution, eeg_fname=eeg_fname, marker_fname=marker_fname)
def __init__(self, str_config, modelname): config = SafeConfigParser() config.read(str_config) self.sections = config.sections() if modelname in self.sections: try: self.modelname = modelname self.arch = config.get(modelname, "ARCH") self.process_fun = 'default' if 'PROCESS_FUN' in config[modelname] is not None: self.process_fun = config[modelname]['PROCESS_FUN'] self.number_of_classes = config.getint(modelname, "NUM_CLASSES") self.number_of_iterations = config.getint( modelname, "NUM_ITERATIONS") self.dataset_size = config.getint(modelname, "DATASET_SIZE") self.test_size = config.getint(modelname, "TEST_SIZE") self.batch_size = config.getint(modelname, "BATCH_SIZE") self.estimated_number_of_batches = int( float(self.dataset_size) / float(self.batch_size)) self.estimated_number_of_batches_test = int( float(self.test_size) / float(self.batch_size)) # snapshot time sets when temporal weights are saved (in steps) self.snapshot_time = config.getint(modelname, "SNAPSHOT_TIME") # test time sets when test is run (in seconds) self.test_time = config.getint(modelname, "TEST_TIME") self.lr = config.getfloat(modelname, "LEARNING_RATE") # snapshot folder, where training data will be saved self.snapshot_prefix = config.get(modelname, "SNAPSHOT_DIR") # number of estimated epochs self.number_of_epochs = int( float(self.number_of_iterations) / float(self.estimated_number_of_batches)) # folder where tf data is saved. Used for training and testing self.data_dir = config.get(modelname, "DATA_DIR") self.channels = config.getint(modelname, "CHANNELS") assert (self.channels == 1 or self.channels == 3) except Exception: raise ValueError("something wrong with configuration file " + str_config) else: raise ValueError(" {} is not a valid section".format(modelname))
def _load_config(self): "Load and parse config file, pass options to livestreamer" config = SafeConfigParser() config_file = os.path.join(self.config_path, 'settings.ini') config.read(config_file) for option, type in list(AVAILABLE_OPTIONS.items()): if config.has_option('DEFAULT', option): if type == 'int': value = config.getint('DEFAULT', option) if type == 'float': value = config.getfloat('DEFAULT', option) if type == 'bool': value = config.getboolean('DEFAULT', option) if type == 'str': value = config.get('DEFAULT', option) self.livestreamer.set_option(option, value)
class Config(object): def __init__(self, path_config=None): self.parser = SafeConfigParser() self.parser.read("./config/path.ini") if path_config is not None: if not os.path.exists(path_config): print("Error!: path_config=%s does not exist." % path_config) sys.exit(-1) self.parser.read(path_config) def getpath(self, key): return self.str2None(json.loads(self.parser.get("path", key))) def getint(self, key): return self.parser.getint("hyperparams", key) def getfloat(self, key): return self.parser.getfloat("hyperparams", key) def getbool(self, key): return self.parser.getboolean("hyperparams", key) def getstr(self, key): return self.str2None(json.loads(self.parser.get("hyperparams", key))) def getlist(self, key): xs = json.loads(self.parser.get("hyperparams", key)) xs = [self.str2None(x) for x in xs] return xs def getdict(self, key): xs = json.loads(self.parser.get("hyperparams", key)) for key in xs.keys(): value = self.str2None(xs[key]) xs[key] = value return xs def str2None(self, s): if isinstance(s, str) and s == "None": return None else: return s
def GetMIPS(): mips = MIPS() ports = serial.tools.list_ports.comports() trimmedPorts = [] for port in ports: if port[2] != 'n/a': trimmedPorts.append(port) config = SafeConfigParser() config.read(settingsPath) if not config.has_section(settingsSection): raise Exception("Can't found port") return port = config.get(settingsSection, settingsKeyPort) parity = config.get(settingsSection, settingsKeyParity) flow = config.getint(settingsSection, settingsKeyFlowControl) bits = config.getfloat(settingsSection, settingsKeyStopBits) intBits = 0 try: intBits = int(bits) except: intBits = -1 if intBits > 0: bits = intBits baud = config.getint(settingsSection, settingsKeyBaudRate) for p in trimmedPorts: if p[0] == port: res = mips.connect(portName=port, baudRate=baud, parity=parity, stopBits=bits, flowControl=flow) if res: return mips else: raise Exception("Can't connect to MIPS") raise Exception("Can't found port") return
def InitializeConfig(): print("Initializing config...") config = SafeConfigParser() config.read(CONFIG_FILE_PATH) # Create references to globals global mode global frame_capture_delay global video_capture_source global images_path global results_save_path global result_image_size #global video_stream_address global mask_images_path global cropped_images_path global labels_text_path global graph_path global post_request_url global similarity_score_threshold # Assign variables from config # Main Settings mode = Modes(config.getint("Settings", "mode")) frame_capture_delay = config.getint("Settings", "frame_capture_delay") video_capture_source = config.get("Settings", "video_capture_source") images_path = config.get("Settings", "images_path") results_save_path = config.get("Settings", "results_save_path") result_image_size = [int(i) for i in config.get("Settings", "result_image_size").split(',')] # List from config returns strings so it needs to be converted to list of ints #video_stream_address = config.get("Settings", "video_stream_address") mask_images_path = config.get("Settings", "mask_images_path") cropped_images_path = config.get("Settings", "cropped_images_path") labels_text_path = config.get("Settings", "labels_text_path") graph_path = config.get("Settings", "graph_path") post_request_url = config.get("Settings", "post_request_url") similarity_score_threshold = config.getfloat("Settings", "similarity_score_threshold")
def param(config_location): config = SafeConfigParser() config.read(config_location) # Constructor parameters rc = config.getfloat('Parameters', 'rc') qc = config.getfloat('Parameters', 'qc') Ru = config.getfloat('Parameters', 'Ru') Rd = config.getfloat('Parameters', 'Rd') L = config.getfloat('Parameters', 'L') k1 = config.getfloat('Parameters', 'k1') k2 = config.getfloat('Parameters', 'k2') k3 = config.getfloat('Parameters', 'k3') rho = config.getfloat('Parameters', 'rho') nu = config.getfloat('Parameters', 'nu') p0 = config.getfloat('Parameters', 'p0') # Geometry parameters Nt = config.getint('Geometry', 'Nt') Nx = config.getint('Geometry', 'Nx') N_cycles = config.getint('Geometry', 'N_cycles') root_vessel = True end_vessel = True T = 1.0 q0 = 2.0 theta = 0.51 # Adimensionalise parameters R1 = R2 = CT = 0 Ru, Rd, L, k1, k2, k3, Re, nu, p0, R1, R2, CT, q0, T = \ nondimensionalise_parameters(rc, qc, Ru, Rd, L, k1, k2, k3, rho, nu, p0, R1, R2, CT, q0, T) return root_vessel, end_vessel, rc, qc, Ru, Rd, L, k1, k2, k3, rho, Re, nu,\ p0, Nt, Nx, T, N_cycles, q0, theta
logging.basicConfig(level=logging.INFO) # config stuff config = SafeConfigParser() config.read('config.ini') # bot config bot_mode = config['bot']['mode'] debug = config.getboolean('bot', 'debug') token = config['bot']['token'] clientid = config['bot']['clientid'] prefix = str(config['bot']['prefix']) botperms = config['bot']['botperms'] # policy denythreshold = config.getfloat('policy', 'denythreshold') accepted_formats = config['policy']['acceptedfiles'].split(',') allowtie = config.getboolean('policy', 'allow_ties') bot = commands.Bot(command_prefix=prefix) # nude detector is not used unless in debug, so you can see why some images are deemed nsfw detector = NudeDetector() classifier = NudeClassifier() # cog loading @bot.event async def on_ready(): invlink = f"https://discord.com/oauth2/authorize?client_id={clientid}&permissions={botperms}&scope=bot" print(f"lewd stopper is ready, invite me at link {invlink}") if debug:
# Init config = SafeConfigParser() config.read('config.ini') data_dir = config.get('Test', 'data_dir') data_file_name = config.get('Test', 'data_file_name') freq_file_name = config.get('Test', 'freq_file_name') power_file_name = config.get('Test', 'power_file_name') sig_gen_ip = config.get('IP', 'sig_gen_ip') sig_gen_clk_ip = config.get('IP', 'sig_gen_clk_ip') afc_idn = config.get('EPICS_IDN', 'afc_epics_idn') sig_ana_idn = config.get('EPICS_IDN', 'sig_ana_epics_idn') num_samples = config.getint('Test', 'num_samples') fs = config.getfloat('Test', 'fs') adc_resolution_bits = config.getint('Test', 'adc_resolution_bits') sig_gen_level = config.get('Test', 'sig_gen_level') sig_gen_clk_level = config.get('Test', 'sig_gen_clk_level') bpm_channel = config.get('Test', 'bpm_channel') power_array = [] # initializing array that receives power of each freq sig_gen, sig_gen_clk, bpm = init_instruments( sig_gen_config=[sig_gen_ip, 'visa'], sig_gen_clk_config=[sig_gen_clk_ip, 'visa'], bpm_config=[afc_idn, 'epics']) create_data_dir(data_dir) sys.stdout.write("\nRunning test...\n\n")
pyplot.ylabel('Power Normalized Magnetude [dBFS]') pyplot.xlabel('Sampling Frequency [Hz]') return pyplot ###################################################################### # Main script config = SafeConfigParser() config.read('config.ini') data_dir = config.get('Test','data_dir') data_file_name = config.get('Test','data_file_name') power_file_name = config.get('Test','power_file_name') power_plot_name = config.get('Test','power_plot_name') num_samples = config.getint('Test','num_samples') fs = config.getfloat('Test','fs') # sampling frequency in Hz adc_resolution_bits = config.getint('Test','adc_resolution_bits') sys.stdout.write("\nRunning test...\n\n") # Initialize arrays that will receive data in each loop cycle fund_power_array = [] fsig_array = [] # Load power and frequency data data = numpy.loadtxt(data_dir + power_file_name, delimiter = ',', skiprows = 1) fsig_array = [d[0] for d in data] fund_power_array = [d[1] for d in data] power_array = []
6: 'Australia'} ##################################################################################### # Tags to be modified by user ##################################################################################### do_email = parser.getboolean('CONTROL', 'do_email') email_list = ast.literal_eval(parser.get('CONTROL', 'email_list')) SHFT_MAP = parser.get('CONTROL', 'SHFT_MAP') # Use Andreas or Butler? MOVIE_SEP = 10 do_LUH1 = parser.getboolean('CONTROL', 'do_LUH1') PLOT_HYDE = parser.getboolean('CONTROL', 'PLOT_HYDE') PREPROCESS_GCAM = parser.getboolean('CONTROL', 'PREPROCESS_GCAM') PREPROCESS_IMAG = parser.getboolean('CONTROL', 'PREPROCESS_IMAG') CONVERT_WH = parser.getboolean('CONTROL', 'CONVERT_WH') # Convert WH information from AEZ to country level ending_diag_cols = ast.literal_eval(parser.get('CONTROL', 'ending_diag_cols')) MATURITY_AGE = parser.getfloat('CONTROL', 'MATURITY_AGE') # Directories input_dir = dir_prj + os.sep + parser.get('GLM', 'path_input') gcam_dir = input_dir + os.sep + parser.get('PATHS', 'gcam_dir') + os.sep out_dir = dir_prj + os.sep + parser.get('PATHS', 'out_dir') + os.sep + parser.get('PROJECT', 'project_name') + os.sep log_dir = out_dir + os.sep + 'Logs' codes_dir = input_dir + os.sep + parser.get('PATHS', 'codes_dir') # Continent and country codes # project-specific constants TAG = parser.get('PROJECT', 'TAG') CROPS = ast.literal_eval(parser.get('GCAM', 'CROPS')) PASTURE = ast.literal_eval(parser.get('GCAM', 'PASTURE')) FOREST = ast.literal_eval(parser.get('GCAM', 'FOREST')) URBAN = ast.literal_eval(parser.get('GCAM', 'URBAN')) FNF_DEFN = 2.0 # Forest/Non-forest definition
def run(ini_file='TOPKAPI.ini', verbose=False, quiet=False, parallel_exec=True, nworkers=int(mp.cpu_count()-1)): """Run the model. Parameters ---------- ini_file : str The name of the PyTOPKAPI initialization file. This file describes the locations of the parameter files and model setup options. Default is to use a file named `TOPKAPI.ini` in the current directory. verbose : bool Prints runtime information [default False - don't display runtime info]. Is independent of the `quiet` keyword argument. quiet : bool Toggles whether to display an informational banner at runtime [default False - display banner]. Is independent of the `verbose` keyword argument. nworkers : int Number of worker processes to spawn for solving each cell's time-series in parallel. Default is one fewer than CPU count reported by multiprocessing. """ ##================================## ## Read the input file (*.ini) ## ##================================## config = SafeConfigParser() config.read(ini_file) ##~~~~~~ Numerical_options ~~~~~~## solve_s = config.getint('numerical_options', 'solve_s') solve_o = config.getint('numerical_options', 'solve_o') solve_c = config.getint('numerical_options', 'solve_c') ##~~~~~~~~~~~ input files ~~~~~~~~~~~## #Param file_global_param = config.get('input_files', 'file_global_param') file_cell_param = config.get('input_files', 'file_cell_param') #Rain file_rain = config.get('input_files', 'file_rain') #ETP file_ET = config.get('input_files', 'file_ET') #~~~~~~~~~~~ Group (simulated event) ~~~~~~~~~~~## group_name = config.get('groups', 'group_name') ##~~~~~~ Calibration ~~~~~~## fac_L = config.getfloat('calib_params', 'fac_L') fac_Ks = config.getfloat('calib_params', 'fac_Ks') fac_n_o = config.getfloat('calib_params', 'fac_n_o') fac_n_c = config.getfloat('calib_params', 'fac_n_c') ##~~~~~~ External flows ~~~~~~## external_flow = config.getboolean('external_flow', 'external_flow') if external_flow: file_Qexternal_flow = config.get('external_flow', 'file_Qexternal_flow') Xexternal_flow = config.getfloat('external_flow', 'Xexternal_flow') Yexternal_flow = config.getfloat('external_flow', 'Yexternal_flow') ##~~~~~~~~~~~ output files ~~~~~~~~~~## file_out = config.get('output_files', 'file_out') ut.check_file_exist(file_out) #create path_out if it doesn't exist if os.path.exists(file_out): first_run = False else: first_run = True append_output = config.getboolean('output_files', 'append_output') if append_output is True: fmode = 'a' else: fmode = 'w' ##============================## ## Read the forcing data ## ##============================## if verbose: print('Read the forcing data') #~~~~Rainfall h5_rain = h5py.File(file_rain) dset_name = '/{}/rainfall'.format(group_name) rainfall_forcing = h5_rain[dset_name][...] h5_rain.close() #~~~~ETr - Reference crop ET h5_ET = h5py.File(file_ET) dset_name = '/{}/ETr'.format(group_name) ETr_forcing = h5_ET[dset_name][...] #~~~~ETo - Open water potential evap. dset_name = '/{}/ETo'.format(group_name) ET0_forcing = h5_ET[dset_name][...] 
h5_ET.close() #~~~~external_flow flows if external_flow: external_flow_records = np.loadtxt(file_Qexternal_flow)[:, 5] else: external_flow_records = None ##============================## ## Pretreatment of input data ## ##============================## if verbose: print('Pretreatment of input data') #~~~~Read Global parameters file X, Dt, alpha_s, \ alpha_o, alpha_c, \ A_thres, W_min, W_max = pm.read_global_parameters(file_global_param) #~~~~Read Cell parameters file ar_cell_label, ar_coorx, \ ar_coory, channel_flag, \ Xc, ar_dam, \ ar_tan_beta, ar_tan_beta_channel, \ ar_L, Ks, \ ar_theta_r, ar_theta_s, \ ar_n_o, ar_n_c, \ ar_cell_down, ar_pVs_t0, \ ar_Vo_t0, ar_Qc_t0, \ Kc, psi_b, lamda = pm.read_cell_parameters(file_cell_param) #~~~~Number of cell in the catchment nb_cell = len(ar_cell_label) #~~~~Computation of cell order node_hierarchy = pm.compute_node_hierarchy(ar_cell_label, ar_cell_down) ar_label_sort = pm.sort_cell(ar_cell_label, ar_cell_down) #~~~~Computation of upcells li_cell_up = pm.direct_up_cell(ar_cell_label, ar_cell_down, ar_label_sort) #~~~~Computation of drained area ar_A_drained = pm.drained_area(ar_label_sort, li_cell_up, X) #~~~~Apply calibration factors to the parameter values ar_L = ar_L*fac_L Ks = Ks*fac_Ks ar_n_o = ar_n_o*fac_n_o ar_n_c = ar_n_c*fac_n_c if verbose: print('Max L=', max(ar_L)) print('Max Ks=', max(Ks)) print('Max n_o=', max(ar_n_o)) print('Max n_c=', max(ar_n_c)) #~~~~Computation of model parameters from physical parameters Vsm, b_s, b_o, \ W, b_c = pm.compute_cell_param(X, Xc, Dt, alpha_s, alpha_o, alpha_c, nb_cell, A_thres, W_max, W_min, channel_flag, ar_tan_beta, ar_tan_beta_channel, ar_L, Ks, ar_theta_r, ar_theta_s, ar_n_o, ar_n_c, ar_A_drained) #~~~~Look for the cell of external_flow tunnel if external_flow: cell_external_flow = ut.find_cell_coordinates(ar_cell_label, Xexternal_flow, Yexternal_flow, ar_coorx, ar_coory, channel_flag) if verbose: print('external flows will be taken into account for cell no',\ cell_external_flow, ' coordinates ('\ ,Xexternal_flow,',',Yexternal_flow,')') else: cell_external_flow = None #~~~~Number of simulation time steps nb_time_step = rainfall_forcing.shape[0] ##=============================## ## Variable array definition ## ##=============================## ## Initialisation of the reservoirs #Matrix of soil,overland and channel store at the begining of the time step if append_output and not first_run: if verbose: print('Initialize from simulation file') h5file_in = h5py.File(file_out) Vs_t0 = h5file_in['/Soil/V_s'][-1, :] Vc_t0 = h5file_in['/Channel/V_c'][-1, :] Vo_t0 = h5file_in['/Overland/V_o'][-1, :] h5file_in.close() else: if verbose: print('Initialize from parameters') Vs_t0 = fl.initial_volume_soil(ar_pVs_t0, Vsm) Vo_t0 = ar_Vo_t0 Vc_t0 = fl.initial_volume_channel(ar_Qc_t0, W, X, ar_n_c) ##=============================## ## HDF5 output file definition ## ##=============================## h5file, dset_Vs, dset_Vo, dset_Vc, \ dset_Qs_out, dset_Qo_out, dset_Qc_out, \ dset_Q_down, dset_ET_out, dset_Ec_out \ = ut.open_simulation_file(file_out, fmode, Vs_t0, Vo_t0, Vc_t0, no_data, nb_cell, nb_time_step, append_output, first_run) eff_theta = ar_theta_s - ar_theta_r ##===========================## ## Core of the Model ## ##===========================## if not quiet: ut.show_banner(ini_file, nb_cell, nb_time_step) progress_desc = 'Simulation' else: progress_desc = 'PyTOPKAPI v{}'.format(pytopkapi.__version__) # prepare parameter dict exec_params = {'nb_cell': nb_cell, 'nb_time_step': nb_time_step, 'progress_desc': 
progress_desc, 'Dt': Dt, 'rainfall_forcing': rainfall_forcing, 'ETr_forcing': ETr_forcing, 'ET0_forcing': ET0_forcing, 'psi_b': psi_b, 'lamda': lamda, 'eff_theta': eff_theta, 'Ks': Ks, 'X': X, 'b_s': b_s, 'b_o': b_o, 'b_c': b_c, 'alpha_s': alpha_s, 'alpha_o': alpha_o, 'alpha_c': alpha_c, 'Vs_t0': Vs_t0, 'Vo_t0': Vo_t0, 'Vc_t0': Vc_t0, 'Vsm': Vsm, 'dset_Vs': dset_Vs, 'dset_Vo': dset_Vo, 'dset_Vc': dset_Vc, 'dset_Qs_out': dset_Qs_out, 'dset_Qo_out': dset_Qo_out, 'dset_Qc_out': dset_Qc_out, 'dset_Q_down': dset_Q_down, 'dset_ET_out': dset_ET_out, 'dset_Ec_out': dset_Ec_out, 'solve_s': solve_s, 'solve_o': solve_o, 'solve_c': solve_c, 'channel_flag': channel_flag, 'W': W, 'Xc': Xc, 'Kc': Kc, 'cell_external_flow': cell_external_flow, 'external_flow_records': external_flow_records, 'node_hierarchy': node_hierarchy, 'li_cell_up': li_cell_up, 'nworkers': nworkers} if not parallel_exec: # Serial execution. Solve by timestep in a single process. # Outer loop timesteps - inner loop cells _serial_execute(exec_params) else: # Parallel execution. Solve by cell using multiple processes. # Outer loop cells - inner loop timesteps _parallel_execute(exec_params) h5file.close()
pyplot.xlabel('Sampling Frequency [Hz]') return pyplot ###################################################################### # Main script config = SafeConfigParser() config.read('config.ini') data_dir = config.get('Test', 'data_dir') data_file_name = config.get('Test', 'data_file_name') power_file_name = config.get('Test', 'power_file_name') power_plot_name = config.get('Test', 'power_plot_name') num_samples = config.getint('Test', 'num_samples') fs = config.getfloat('Test', 'fs') # sampling frequency in Hz adc_resolution_bits = config.getint('Test', 'adc_resolution_bits') sys.stdout.write("\nRunning test...\n\n") # Initialize arrays that will receive data in each loop cycle fund_power_array = [] fsig_array = [] # Load power and frequency data data = numpy.loadtxt(data_dir + power_file_name, delimiter=',', skiprows=1) fsig_array = [d[0] for d in data] fund_power_array = [d[1] for d in data] power_array = []
Config.read(iniFile) version = Config.get('general', 'version') fparams = {} # the for (key, val) in Config.items('params'): if ',' in val: param, step = val.split(',') fparams[key] = float(param) else: fparams[key] = float(val) # load the mass calibration grid mexprange, zrange, lndM = pickle.load(open(calFile, "rb"), encoding='latin1') bigDataDir = Config.get('general', 'bigDataDirectory') pzcutoff = Config.getfloat('general', 'photoZCutOff') mgrid, zgrid, siggrid = pickle.load(open( bigDataDir + "szgrid_" + expName + "_" + gridName + "_v" + version + ".pkl", 'rb'), encoding='latin1') assert np.all(mgrid == mexprange) assert np.all(zrange == zgrid) saveId = expName + "_" + gridName + "_" + calName + "_v" + version from orphics.io import dict_from_section, list_from_config constDict = dict_from_section(Config, 'constants') clusterDict = dict_from_section(Config, 'cluster_params') beam = list_from_config(Config, expName, 'beams')
class ZFind: def __init__(self, num_z=5, inifile=None, dest=None, nproc=1, mask=False, overwrite=True, zrange=None): self.num_z = num_z self.inifile = inifile self.dest = dest self.overwrite = overwrite if self.inifile: self.set_templates_from_inifile() if zrange: self.zmin = [zrange[0]] * len(self.templates) self.zmax = [zrange[1]] * len(self.templates) self.nproc = nproc self.mask = mask def set_templates_from_inifile(self): self.labels = [] self.templates = [] self.zmin = [] self.zmax = [] self.npoly = [] self.npixstep = [] self.group = [] if exists(self.inifile): self.option = SafeConfigParser() self.option.optionxform = str r = self.option.read(self.inifile) if len(r) == 1: for section in self.option.sections(): self.labels.append(section) if self.option.has_option(section,'template'): self.templates.append(self.option.get(section, 'template')) if self.option.has_option(section,'zmin'): self.zmin.append(self.option.getfloat(section,'zmin')) if self.option.has_option(section,'zmax'): self.zmax.append(self.option.getfloat(section,'zmax')) if self.option.has_option(section,'npoly'): self.npoly.append(self.option.getint(section,'npoly')) if self.option.has_option(section,'npixstep'): self.npixstep.append(self.option.getint(section, 'npixstep')) if self.option.has_option(section,'group'): self.group.append(self.option.get(section,'group')) else: print("Cannot parse ini file %r" % self.inifile) if not self.labels: self.labels = None if not self.templates: self.templates = None if not self.zmin: self.zmin = None if not self.zmax: self.zmax = None if not self.npoly: self.npoly = None if not self.npixstep: self.npixstep = None if not self.group: self.group = None self.set_templates() else: print("WARNING: %r does not exist" % self.inifile) def set_templates(self, templates=None, zmin=None, zmax=None, npoly=None, npixstep=None, group=None): if templates: self.templates = templates if zmin: self.zmin = zmin if zmax: self.zmax = zmax if npoly: self.npoly = npoly if npixstep: self.npixstep = npixstep if group: self.group = group if type(self.templates) is str: try: self.templates = [self.templates] except Exception as e: print('Templates not a list and unable to convert to list! \ Exception: %r' % e) sys.exit(1) if type(self.templates) is list: self.templates = list(map(str, self.templates)) if self.zmin is not None: if type(self.zmin) is not list: try: self.zmin = [self.zmin] except: try: self.zmin = self.zmin.tolist() except Exception as e: print('Can\'t convert zmin to list - defaulting to \ full zrange! Exception: %r' % e) self.zmin = None self.zmax = None if type(self.zmin) is list: if len(self.zmin) != len(self.templates): print('Length of zmin doesn\'t match length of templates - \ defaulting to full zrange!') self.zmin = None self.zmax = None if self.zmax is None: print('zmax not given - defaulting to full zrange!') self.zmin = None self.zmax = None else: if type(self.zmax) is not list: try: self.zmax = [self.zmax] except: try: self.zmax = self.zmax.tolist() except Exception as e: print('Can\'t convert zmax to list - \ defaulting to full zrange! 
\ Exception: %r' % e) self.zmin = None self.zmax = None if len(self.zmin) != len(self.zmax): print('Length of zmin and zmax don\'t match - \ defaulting to full zrange!') self.zmin = None self.zmax = None #import pdb; pdb.set_trace() if self.npoly is None: self.npoly = [4]*len(self.templates) if self.group is None: self.group = [0]*len(self.templates) else: if type(self.npoly) is not list: try: self.npoly = [self.npoly] except: try: self.npoly = self.npoly.tolist() except Exception as e: print('npoly not a list and unable to convert to \ list - defaulting to npoly=4 for all \ templates! Exception: %r' % e) self.npoly = [4]*len(self.templates) else: self.npoly = list(map(int, self.npoly)) if self.npixstep is None: self.npixstep = [1]*len(self.templates) else: if type(self.npixstep) is not list: try: self.npixstep = [self.npixstep] except: try: self.npixstep = self.npixstep.tolist() except Exception as e: print('npixstep not a list and unable to convert to \ list - defaulting to npixstep=1 for all \ templates! Exception: %r' % e) self.npixstep = [1]*len(self.templates) else: self.npixstep = list(map(int, self.npixstep)) def reduce_plate_mjd(self, plate=None, mjd=None, fiberid=None, data_range=None, chi2file=False, platepath=None): print "\nPlate %s MJD %s Fiber %s" % (plate, mjd, fiberid) self.chi2file = chi2file # Check types and try to convert to proper types if necessary if fiberid is not None: if type(fiberid) is not list: try: fiberid = [fiberid] fiberid = list(map(int, fiberid)) except ValueError: try: fiberid = fiberid.tolist() fiberid = list(map(int, fiberid)) except ValueError: print('fiberid not set properly - running full plate!') else: fiberid = list(map(int, fiberid)) # Spec specs = spec.Spec(plate=plate, mjd=mjd, fiberid=fiberid, platepath=platepath) fiberid = specs.fiberid # ZFinder, ZFitter zfindobjs = [] zfitobjs = [] if (self.zmin is not None) & (self.zmax is not None): for i in range(len(self.templates)): zfindobjs.append( zfinder.ZFinder(fname=self.templates[i], group=self.group[i], npoly=self.npoly[i], zmin=self.zmin[i], zmax=self.zmax[i], nproc=self.nproc) ) if self.mask: zfindobjs[i].zchi2( specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], plate=plate, mjd=mjd, fiberid=fiberid[0], chi2file=self.chi2file, linelist=__linelist__) else: zfindobjs[i].zchi2( specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], plate=plate, mjd=mjd, fiberid=fiberid[0], chi2file=self.chi2file ) zfitobjs.append( zfitter.ZFitter(zfindobjs[i].zchi2arr, zfindobjs[i].zbase) ) zfitobjs[i].z_refine2() else: for i in range(len(self.templates)): zfindobjs.append( zfinder.ZFinder(fname=self.templates[i], group=self.group[i], npoly=self.npoly[i], npixstep=self.npixstep[i], nproc=self.nproc) ) if mask: zfindobjs[i].zchi2( specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], plate=plate, mjd=mjd, fiberid=fiberid[0], chi2file=self.chi2file, linelist=__linelist__) else: zfindobjs[i].zchi2( specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], plate=plate, mjd=mjd, fiberid=fiberid[0], chi2file=self.chi2file) zfitobjs.append( zfitter.ZFitter(zfindobjs[i].zchi2arr, zfindobjs[i].zbase) ) zfitobjs[i].z_refine2() # Flags flags = [] for i in range(len(zfindobjs)): flags.append( misc.comb_flags(specs, zfindobjs[i], zfitobjs[i]) ) # ZPicker zpick = zpicker2.ZPicker(specs, zfindobjs, zfitobjs, flags) output = None # Write output if self.dest is None: output = io2.WriteRedmonster(zpick, overwrite=True) else: if type(self.dest) is str: output = 
io2.WriteRedmonster(zpick, dest=self.dest, overwrite=True) else: try: self.dest = str(self.dest) output = io2.WriteRedmonster(zpick, dest=self.dest, overwrite=True) except Exception as e: print('Could not convert dest to string - writing to \ default directory and NOT clobbering old files! \ Exception: %r' % e) output = io2.WriteRedmonster(zpick, overwrite=True) if output: if len(zpick.fiberid) == 1: output.write_fiber() else: output.write_plate() def reduce_gen(self, filepath=None, data_range=None, chi2file=False): self.chi2file = False if filepath is None: print("Invalid file path.") else: specs = io2.SpecGen(filepath=filepath) zfindobjs = [] zfitobjs = [] if (self.zmin is not None) & (self.zmax is not None): for i in range(len(self.templates)): zfindobjs.append( zfinder.ZFinder(fname=self.templates[i], group=self.group[i], npoly=self.npoly[i], zmin=self.zmin[i], zmax=self.zmax[i], nproc=self.nproc) ) zfindobjs[i].zchi2(specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], chi2file=self.chi2file) zfitobjs.append( zfitter.ZFitter(zfindobjs[i].zchi2arr, zfindobjs[i].zbase) ) zfitobjs[i].z_refine2() else: for i in range(len(self.templates)): zfindobjs.append( zfinder.ZFinder(fname=self.templates[i], group=self.group[i], npoly=self.npoly[i], npixstep=self.npixstep[i], nproc=self.nproc) ) zfindobjs[i].zchi2( specs.flux, specs.loglambda, specs.ivar, npixstep=self.npixstep[i], chi2file=self.chi2file ) zfitobjs.append( zfitter.ZFitter(zfindobjs[i].zchi2arr, zfindobjs[i].zbase) ) zfitobjs[i].z_refine2() # Flags flags = [] for i in range(len(zfindobjs)): flags.append( misc.comb_flags(specs, zfindobjs[i], zfitobjs[i])) # ZPicker zpick = zpicker2.ZPicker(specs, zfindobjs, zfitobjs, flags) output = None # Write output if self.dest is None: output = io2.WriteRedmonster(zpick, dest=filepath, overwrite=True) if output: output.write_gen()
ax2 = plt.subplot(212) plt.plot(freq, phase) plt.grid('on') plt.title('FFT - Phase') plt.ylabel('Phase [degrees]') plt.xlabel('Frequency [Hz]') return plt ##################################################################### # Main script # Init config = SafeConfigParser() config.read('config.ini') fs = config.getfloat('Test','fs') # sampling frequency in Hz adc_resolution_bits = config.getint('Test','adc_resolution_bits') amp_threshold = config.getfloat('Test','amp_threshold') data_dir = config.get('Test','data_dir') data_file_name = config.get('Test','data_file_name') freq_file_name = config.get('Test','freq_file_name') power_file_name = config.get('Test','power_file_name') # Load power and frequency data data = numpy.loadtxt(data_dir + power_file_name, delimiter = ',', skiprows = 1) fsig_array = [d[0] for d in data] fund_power_array = [d[1] for d in data] # Generate fourier series for each frequency and plot for i, power in enumerate(fund_power_array):
dCls = {} for paramName in paramList: dCls[paramName] = tryLoad(derivRoot+'_dCls_'+paramName+'.csv',',') # Get CMB noise functions and ell ranges. if (noise_func_tt is None) or (noise_func_ee is None): fnTT, fnEE = noiseFromConfig(Config,expName,TCMB=TCMB,beamsOverride=None,noisesOverride=None,lkneeTOverride=None,lkneePOverride=None,alphaTOverride=None,alphaPOverride=None) tellmin,tellmax = list_from_config(Config,expName,'tellrange') pellmin,pellmax = list_from_config(Config,expName,'pellrange') if (noise_func_tt is not None): fnTT = cosmology.noise_pad_infinity(noise_func_tt,tellmin,tellmax) if (noise_func_ee is not None): fnEE = cosmology.noise_pad_infinity(noise_func_ee,pellmin,pellmax) # Pad CMB lensing noise with infinity outside L ranges kellmin,kellmax = list_from_config(Config,'lensing','Lrange') fnKK = cosmology.noise_pad_infinity(interp1d(ls,Nls,fill_value=np.inf,bounds_error=False),kellmin,kellmax) # Decide on what ell range to calculate the Fisher matrix ellrange = np.arange(min(tellmin,pellmin,kellmin),max(tellmax,pellmax,kellmax)).astype(int) # Get fsky fsky = Config.getfloat(expName,'fsky') # Calculate the Fisher matrix and add to other Fishers Fisher = calcFisher(paramList,ellrange,fidCls,dCls,lambda x: fnTT(x)*TCMB**2.,lambda x: fnEE(x)*TCMB**2.,fnKK,fsky,verbose=True) np.savetxt(saveName,Fisher)
def main(): ''' Main function for this module. Parses all command line arguments, reads in data from stdin, and sends it to the proper BLS algorithm. ''' # This is a global list of default values that will be used by the argument parser # and the configuration parser. defaults = {'min_duration':'0.0416667', 'max_duration':'0.5', 'n_bins':'100', 'direction':'0', 'mode':'vec', 'print_format':'encoded', 'verbose':'0', 'profiling':'0'} # Set up the parser for command line arguments and read them. parser = __init_parser(defaults) args = parser.parse_args() if not args.config: # No configuration file specified -- read in command line arguments. if not args.segment: parser.error('No trial segment specified and no configuration file given.') segment = args.segment mindur = args.mindur maxdur = args.maxdur nbins = args.nbins direction = args.direction mode = args.mode fmt = args.fmt verbose = args.verbose profile = args.profile else: # Configuration file was given; read in that instead. cp = SafeConfigParser(defaults) cp.read(args.config) segment = cp.getfloat('DEFAULT', 'segment') mindur = cp.getfloat('DEFAULT', 'min_duration') maxdur = cp.getfloat('DEFAULT', 'max_duration') nbins = cp.getint('DEFAULT', 'n_bins') direction = cp.getint('DEFAULT', 'direction') mode = cp.get('DEFAULT', 'mode') fmt = cp.get('DEFAULT', 'print_format') verbose = cp.getboolean('DEFAULT', 'verbose') profile = cp.getboolean('DEFAULT', 'profiling') # Perform any sanity-checking on the arguments. __check_args(segment, mindur, maxdur, nbins, direction) # Send the data to the algorithm. for k, q, time, flux, fluxerr in read_mapper_output(sys.stdin): # Extract the array columns. time = np.array(time, dtype='float64') flux = np.array(flux, dtype='float64') fluxerr = np.array(fluxerr, dtype='float64') if profile: # Turn on profiling. pr = cProfile.Profile() pr.enable() if mode == 'python': raise NotImplementedError out = bls_pulse_python(time, flux, fluxerr, nbins, segment, mindur, maxdur, direction=direction) elif mode == 'vec': raise NotImplementedError out = bls_pulse_vec(time, flux, fluxerr, nbins, segment, mindur, maxdur, direction=direction) elif mode == 'cython': out = bls_pulse_cython(time, flux, fluxerr, nbins, segment, mindur, maxdur, direction=direction) else: raise ValueError('Invalid mode: %s' % mode) if profile: # Turn off profiling. pr.disable() ps = pstats.Stats(pr, stream=sys.stderr).sort_stats('time') ps.print_stats() if direction == 2: srsq_dip = out['srsq_dip'] duration_dip = out['duration_dip'] depth_dip = out['depth_dip'] midtime_dip = out['midtime_dip'] srsq_blip = out['srsq_blip'] duration_blip = out['duration_blip'] depth_blip = out['depth_blip'] midtime_blip = out['midtime_blip'] segstart = out['segstart'] segend = out['segend'] # Print output. 
if fmt == 'encoded': print "\t".join([k, q, encode_array(segstart), encode_array(segend), encode_array(srsq_dip), encode_array(duration_dip), encode_array(depth_dip), encode_array(midtime_dip), encode_array(srsq_blip), encode_array(duration_blip), encode_array(depth_blip), encode_array(midtime_blip)]) elif fmt == 'normal': print "-" * 120 print "Kepler " + k print "Quarters: " + q print "-" * 120 print '{0: <7s} {1: <13s} {2: <13s} {3: <13s} {4: <13s} {5: <13s} {6: <13s} {7: <13s} ' \ '{8: <13s}'.format('Segment', 'Dip SR^2', 'Dip dur.', 'Dip depth', 'Dip mid.', 'Blip SR^2', 'Blip dur.', 'Blip depth', 'Blip mid.') for i in xrange(len(srsq_dip)): print '{0: <7d} {1: <13.6f} {2: <13.6f} {3: <13.6f} {4: <13.6f} ' \ '{5: <13.6f} {6: <13.6f} {7: <13.6f} {8: <13.6f}'.format(i, srsq_dip[i], duration_dip[i], depth_dip[i], midtime_dip[i], srsq_blip[i], duration_blip[i], depth_blip[i], midtime_blip[i]) print "-" * 120 print print else: srsq = out['srsq'] duration = out['duration'] depth = out['depth'] midtime = out['midtime'] segstart = out['segstart'] segend = out['segend'] # Print output. if fmt == 'encoded': print "\t".join([k, q, encode_array(segstart), encode_array(segend), encode_array(srsq), encode_array(duration), encode_array(depth), encode_array(midtime)]) elif fmt == 'normal': print "-" * 80 print "Kepler " + k print "Quarters: " + q print "-" * 80 print '{0: <7s} {1: <13s} {2: <10s} {3: <9s} {4: <13s}'.format('Segment', 'SR^2', 'Duration', 'Depth', 'Midtime') for i in xrange(len(srsq)): print '{0: <7d} {1: <13.6f} {2: <10.6f} {3: <9.6f} {4: <13.6f}'.format(i, srsq[i], duration[i], depth[i], midtime[i]) print "-" * 80 print print
class Configuration(object): """Class storing configuration details for Modula""" def __init__(self, rootdir=".", cfg=None): defaults = { "@paths.modules": "modules", "@paths.storage": "storage" } if cfg: self.cfg = cfg else: cfgfiles = ["modules.cfg", "modula.cfg"] cfgfiles = [os.path.join(rootdir, file) for file in cfgfiles] self.cfg = Parser() self.cfg.read(cfgfiles) # Add defaults for k, v in defaults.items(): if k not in self: self[k] = v # Make sure all paths are absolute for k, v in self.items("@paths"): self["@paths.%s" % k] = os.path.abspath(os.path.expanduser(v)) # Make sure all input files are absolute for k, v in self.items("@inputs"): self["@inputs.%s" % k] = os.path.abspath(os.path.expanduser(v)) def _parse_name(self, name): if "." in name: section, option = name.split(".", 1) else: section = "@global" option = name return section, option def sections(self): return self.cfg.sections() def get(self, name): return self.cfg.get(*self._parse_name(name)) def getInt(self, name): return self.cfg.getint(*self._parse_name(name)) def getFloat(self, name): return self.cfg.getfloat(*self._parse_name(name)) def getBoolean(self, name): return self.cfg.getboolean(*self._parse_name(name)) def items(self, section): return self.cfg.items(section) def __contains__(self, name): return self.cfg.has_option(*self._parse_name(name)) def __getitem__(self, name): try: return self.cfg.get(*self._parse_name(name)) except KeyError: raise KeyError(name) def __setitem__(self, name, value): section, option = self._parse_name(name) if not self.cfg.has_section(section): self.cfg.add_section(section) self.cfg.set(section, option, value) def __delitem__(self, name): section, option = self._parse_name(name) try: ok = self.cfg.remove_option(section, option) if not ok: raise NoOptionError() except NoOptionError: raise KeyError(name)
def __init__(self): """Init""" cp = SafeConfigParser(defaults={ # True if axial symmetry 'axisym': True, # "Physical" length of the domain (in meters) 'LENGTH': 3000, # Number of elements 'NSPEC': 250, # Degree of the basis functions 'N': 4, # Degree of basis functions in the first element 'NGLJ': 4, # Number of time steps 'NTS': 2, # Courant CFL number 'CFL': 0.45, # Grid description 'GRID_TYPE': 'homogeneous', 'GRID_FILE': 'grid_homogeneous.txt', 'TICKS_FILE': 'ticks_homogeneous.txt', # kg/m^3 'DENSITY': 2500, # Pa 'RIGIDITY': 30000000000, # Duration of the source in dt 'TSOURCE': 100, # GLL point number on which the source is situated 'ISOURCE': 0, # Maximum amplitude 'MAX_AMPL': 1e7, # Source's type 'SOURCE_TYPE': 'ricker', # Decay rate for the ricker 'DECAY_RATE': 2.628, # Time steps between snapshots (0 == disabled) 'SNAPSHOT': 0, # Plot grid, source, and periodic results 'PLOT': False, # One image is displayed each DPLOT time step 'DPLOT': 10, }) with open('Par_file') as f: try: # Python 3 cp.read_string('[global]\n' + f.read(), source='Par_file') except AttributeError: # Python 2 cp.readfp(FakeGlobalSectionHead(f)) self.axisym = cp.getboolean('global', 'AXISYM') self.length = cp.getfloat('global', 'LENGTH') self.nSpec = cp.getint('global', 'NSPEC') self.N = cp.getint('global', 'N') self.NGLJ = cp.getint('global', 'NGLJ') self.nts = cp.getint('global', 'NTS') self.cfl = cp.getfloat('global', 'CFL') self.gridType = cp.get('global', 'GRID_TYPE').strip("'\"") self.gridFile = cp.get('global', 'GRID_FILE').strip("'\"") self.ticksFile = cp.get('global', 'TICKS_FILE').strip("'\"") self.meanRho = cp.getfloat('global', 'DENSITY') self.meanMu = cp.getfloat('global', 'RIGIDITY') self.tSource = cp.getfloat('global', 'TSOURCE') self.iSource = cp.getint('global', 'ISOURCE') self.maxAmpl = cp.getfloat('global', 'MAX_AMPL') self.sourceType = cp.get('global', 'SOURCE_TYPE').strip("'\"") self.decayRate = cp.getfloat('global', 'DECAY_RATE') self.snapshot = cp.getint('global', 'SNAPSHOT') self.plot = cp.getboolean('global', 'PLOT') self.dplot = cp.getfloat('global', 'DPLOT') parser = argparse.ArgumentParser( description='Spectral element method in a 1D medium') parser.add_argument('--no-plot', action='store_true', help='Force disable plotting') args = parser.parse_args() self.plot = self.plot and not args.no_plot # Number of GLL points per elements self.nGLL = self.N + 1 # Number of GLJ in the first element self.nGLJ = self.NGLJ + 1 # Number of points in the array self.nGlob = (self.nSpec - 1) * self.N + self.NGLJ + 1 self.ibool = self.make_global_index() # Time step (will be updated) self.dt = 0 # Gauss Lobatto Legendre points and integration weights: try: # Position of the GLL points in [-1,1] self.ksiGLL = gll.GLL_POINTS[self.N] # Integration weights self.wGLL = gll.GLL_WEIGHTS[self.N] except KeyError: raise ValueError('N = %d is invalid!' % (self.N, )) try: # Position of the GLJ points in [-1,1] self.ksiGLJ = gll.GLJ_POINTS[self.NGLJ] # Integration weights self.wGLJ = gll.GLJ_WEIGHTS[self.NGLJ] except KeyError: raise ValueError('NGLJ = %d is invalid!' % (self.NGLJ, )) # Derivatives of the Lagrange polynomials at the GLL points self.deriv = gll.lagrange_derivative(self.ksiGLL) self.derivGLJ = gll.glj_derivative(self.ksiGLJ)
class VideoWall(QtGui.QWidget): def __init__(self, app, parent): super(VideoWall, self).__init__() self.players = {} #reading config.ini self.config = SafeConfigParser() self.config.read('config.ini') self.number_of_cameras = self.config.getint('videowall', 'number_of_cameras', fallback=6) if self.number_of_cameras > 6: self.number_of_cameras = 6 self.window_aspect_ratio_coeff = self.config.getfloat( 'videowall', 'window_aspect_ratio_coeff', fallback=1.77777777778) self.audio_mode = self.config.get('videowall', 'audio_mode', fallback='hdmi') self.rtsp_sleep_delay = self.config.getint('videowall', 'rtsp_sleep_delay', fallback=1) self.hide_window_title = self.config.getint('videowall', 'hide_window_title', fallback=0) if self.hide_window_title == 1: self.setWindowFlags(QtCore.Qt.FramelessWindowHint) self.timer = None self.use_sequence_view = self.config.getint('videowall', 'use_sequence_view', fallback=0) if self.use_sequence_view == 1: self.sequence_view = {} self.sequence_current_index = 0 sequence = self.config.get('videowall', 'sequence_view', fallback='1').split(',') for s_ind in range(0, len(sequence)): self.sequence_view[s_ind] = int(sequence[s_ind]) self.sequence_view_seconds = self.config.getint( 'videowall', 'sequence_view_seconds', fallback=30000) #reading cameras settings self.cameras_settings = {} for cam_number in range(0, self.number_of_cameras): self.cameras_settings[cam_number] = { 'preview_url': (self.config.get('camera-' + str(cam_number + 1), 'preview_url', fallback='')), 'full_url': (self.config.get('camera-' + str(cam_number + 1), 'full_url', fallback='')) } #setting window title window_title = self.config.get('videowall', 'window_title', fallback='Video Wall') self.setWindowTitle(window_title) #setting cameras viewports self.stylesheet = self.config.get('videowall', 'camera_stylesheet', fallback='background-color: black') self.viewports = {} for cam_number in range(0, self.number_of_cameras): self.viewports[cam_number] = QtGui.QLabel(self) self.viewports[cam_number].setStyleSheet(self.stylesheet) self.viewports[cam_number].setObjectName('camera-' + str(cam_number + 1)) self.viewports[cam_number].setText(str(cam_number + 1)) #self.viewports[cam_number].installEventFilter(self) app.installEventFilter(self) def eventFilter(self, source, event): if event.type() == QtCore.QEvent.MouseButtonPress: if type(source) == QtGui.QLabel: if source.objectName() != 'camera-1': clicked_id = int(source.text()) - 1 self.changeMainCamera(clicked_id) if event.type() == QtCore.QEvent.KeyPress: current_player_id = int(self.viewports[0].text()) - 1 next_player_id = 0 if event.key() == 16777236: # -> next_player_id = current_player_id + 1 if next_player_id > (self.number_of_cameras - 1): next_player_id = 0 if event.key() == 16777234: # <- next_player_id = current_player_id - 1 if next_player_id < 0: next_player_id = self.number_of_cameras - 1 self.changeMainCamera(next_player_id) return super(VideoWall, self).eventFilter(source, event) def changeMainCamera(self, new_cam_id): cam_viewport = None for viewport_id in range(0, self.number_of_cameras): if self.viewports[viewport_id].text() == str(new_cam_id + 1): cam_viewport = self.viewports[viewport_id] current_player_id = int(self.viewports[0].text()) - 1 current_player_atX = self.players[current_player_id].atX current_player_atY = self.players[current_player_id].atY current_player_width = self.players[current_player_id].width current_player_height = self.players[current_player_id].height current_player_audio_mode = 
self.players[current_player_id].audio_mode
        clicked_id = new_cam_id
        clicked_atX = self.players[clicked_id].atX
        clicked_atY = self.players[clicked_id].atY
        clicked_width = self.players[clicked_id].width
        clicked_height = self.players[clicked_id].height
        clicked_audio_mode = self.players[clicked_id].audio_mode

        # stop the main screen player (camera-1)
        self.players[current_player_id].stop()
        self.players[current_player_id].process.stdin.write(b'q')
        self.players[current_player_id].process.terminate()

        # stop and kill the clicked player
        self.players[clicked_id].stop()
        self.players[clicked_id].process.stdin.write(b'q')
        self.players[clicked_id].process.terminate()
        QtCore.QThread.sleep(self.rtsp_sleep_delay)

        # run the main camera's preview stream in the clicked camera's slot
        cam_viewport.setText(str(current_player_id + 1))
        self.players[current_player_id] = Player(
            self.cameras_settings[current_player_id]['preview_url'],
            clicked_atX, clicked_atY, clicked_width, clicked_height,
            clicked_audio_mode, current_player_id)

        # run the clicked camera on the main screen in HD (full_url)
        self.viewports[0].setText(str(clicked_id + 1))
        self.players[clicked_id] = Player(
            self.cameras_settings[clicked_id]['full_url'],
            current_player_atX, current_player_atY,
            current_player_width, current_player_height,
            current_player_audio_mode, clicked_id)
        QtCore.QThread.sleep(self.rtsp_sleep_delay)

    def redrawViewPorts(self, width, height, width_difference):
        sc = 1 / 3
        lc = 2 / 3
        w_offset = width_difference / 2
        for cam_number in range(0, self.number_of_cameras):
            if cam_number == 0:
                self.viewports[cam_number].resize(width * lc, height * lc)
                self.viewports[cam_number].move(w_offset, 0)
            elif cam_number > 3:
                self.viewports[cam_number].resize(width * sc, height * sc)
                self.viewports[cam_number].move(
                    w_offset + ((cam_number - 4) * width * sc), height * lc)
            else:
                self.viewports[cam_number].resize(width * sc, height * sc)
                self.viewports[cam_number].move(
                    w_offset + (width * lc), (cam_number - 1) * height * sc)

    def redrawPlayers(self):
        for cam_number in range(0, self.number_of_cameras):
            pos = self.geometry().topLeft() + self.viewports[cam_number].pos()
            player_id = int(self.viewports[cam_number].text()) - 1
            quality = 'preview_url'
            if cam_number == 0:
                quality = 'full_url'
            if cam_number in self.players:
                self.players[player_id].resize(
                    pos.x(), pos.y(),
                    self.viewports[cam_number].geometry().width(),
                    self.viewports[cam_number].geometry().height())
            elif len(self.cameras_settings[cam_number][quality]) != 0:
                self.players[cam_number] = Player(
                    self.cameras_settings[cam_number][quality],
                    pos.x(), pos.y(),
                    self.viewports[cam_number].geometry().width(),
                    self.viewports[cam_number].geometry().height(),
                    self.audio_mode, cam_number)
                QtCore.QThread.sleep(self.rtsp_sleep_delay)
        # start the sequence timer if needed
        if self.timer is None and self.use_sequence_view == 1:
            self.timer = QtCore.QTimer()
            self.timer.timeout.connect(self.advanceSequence)
            self.timer.start(self.sequence_view_seconds)

    def resizeEvent(self, e):
        width = e.size().width()
        height = e.size().height()
        calc_width = height * self.window_aspect_ratio_coeff
        width_difference = 0
        if calc_width > width:
            height = width / self.window_aspect_ratio_coeff
        else:
            width_difference = width - calc_width
            width = calc_width
        self.redrawViewPorts(width, height, width_difference)
        self.redrawPlayers()

    def moveEvent(self, e):
        self.redrawPlayers()

    def closeEvent(self, e):
        for player in self.players:
            if self.players[player].process is not None and \
                    self.players[player].process.poll() is None:
                self.players[player].stop()
                self.players[player].process.stdin.write(b'q')
                self.players[player].process.terminate()
        e.accept()

    def changeEvent(self, event):
        if event.type() == QtCore.QEvent.WindowStateChange:
            if self.windowState() & QtCore.Qt.WindowMinimized:
                for player in self.players:
                    self.players[player].hide()
            elif event.oldState() & QtCore.Qt.WindowMinimized:
                for player in self.players:
                    self.players[player].unhide()
        QtGui.QWidget.changeEvent(self, event)

    def advanceSequence(self):
        # renamed from timerEvent to avoid shadowing QObject.timerEvent(event)
        self.sequence_current_index = self.sequence_current_index + 1
        if self.sequence_current_index > len(self.sequence_view) - 1:
            self.sequence_current_index = 0
        self.changeMainCamera(self.sequence_view[self.sequence_current_index] - 1)
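# Minimal sketch of the numbered-section pattern above ([camera-1] ...
# [camera-N]) with the fallback= keyword, which needs the Python 3
# configparser. The section and option names mirror the class; the ini text
# and URLs are invented.
from configparser import ConfigParser

config = ConfigParser()
config.read_string("""
[videowall]
number_of_cameras = 2
[camera-1]
preview_url = rtsp://cam1/low
full_url = rtsp://cam1/high
""")
n = config.getint('videowall', 'number_of_cameras', fallback=6)
for cam in range(1, n + 1):
    section = 'camera-%d' % cam
    # fallback also covers a missing section, e.g. [camera-2] here
    preview = config.get(section, 'preview_url', fallback='')
    print(section, preview or '<unconfigured>')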
timestamp INTEGER NOT NULL CHECK (timestamp >= 0), rawresult BLOB, FOREIGN KEY (paramID) REFERENCES RequestedData(paramID) ON DELETE CASCADE ON UPDATE CASCADE, UNIQUE (paramID,timestamp) ); CREATE UNIQUE INDEX IF NOT EXISTS ResultIdx ON ResultsRaw (paramID ASC, timestamp ASC) ;''') Shutdown = threading.Event( ) # Setting this will cause all threads to eventually terminate Results = queue.Queue( ) # Logger threads will push raw results to here and Storers will consume items and put them into the database DataSched = sched.scheduler() ParamsLock, ParamsByLoggers, threads = threading.Lock(), {}, {} DataSched.enter(0, 0, DataWatchdog, (DataSched, config.getfloat('DEFAULT', 'watchdogperiod'))) DataSched.enter(0, 0, FetchAndStore, (sqllock, sql, Results, DataSched, config.getfloat('DEFAULT', 'datathreadperiod'), config.getfloat('DEFAULT', 'watchdogperiod'))) Storer = threading.Thread(target=DataSched.run, name='DataThread') Storer.start() # Start a thread for data storage signal.signal(signal.SIGTERM, TerminateSignal) try: while not Shutdown.is_set(): if not isfile(join(dirname(realpath(__file__)), f'{getpid()}.stop')): time.sleep(config.getfloat('DEFAULT', 'watchdogperiod')) else: logging.warning('Found the stop file. Initiating Shutdown') Shutdown.set()
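# Hedged sketch of the scheduling pattern above: a sched.scheduler job that
# re-enters itself to run periodically, executed on a worker thread. The
# names `tick` and `period` are invented; the real code presumably has
# DataWatchdog and FetchAndStore reschedule themselves the same way via the
# scheduler handle they receive.
import sched
import threading
import time

def tick(scheduler, period):
    print('tick at', time.monotonic())
    scheduler.enter(period, 0, tick, (scheduler, period))  # reschedule itself

DataSched = sched.scheduler()
DataSched.enter(0, 0, tick, (DataSched, 1.0))
worker = threading.Thread(target=DataSched.run, name='DataThread', daemon=True)
worker.start()
time.sleep(3)  # let a few ticks fire before the daemon thread is dropped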
def cli(): parser = argparse.ArgumentParser( description='Check HTTPs rules for validity') parser.add_argument( 'checker_config', help='an integer for the accumulator') parser.add_argument('rule_files', nargs="*", default=[], help="Specific XML rule files") parser.add_argument('--json_file', default=None, help='write results in json file') args = parser.parse_args() config = SafeConfigParser() config.read(args.checker_config) logfile = config.get("log", "logfile") loglevel = convertLoglevel(config.get("log", "loglevel")) if logfile == "-": logging.basicConfig(stream=sys.stderr, level=loglevel, format="%(levelname)s %(message)s") else: logging.basicConfig(filename=logfile, level=loglevel, format="%(asctime)s %(levelname)s %(message)s [%(pathname)s:%(lineno)d]") autoDisable = False if config.has_option("rulesets", "auto_disable"): autoDisable = config.getboolean("rulesets", "auto_disable") # Test rules even if they have default_off=... includeDefaultOff = False if config.has_option("rulesets", "include_default_off"): includeDefaultOff = config.getboolean( "rulesets", "include_default_off") ruledir = config.get("rulesets", "rulesdir") checkCoverage = False if config.has_option("rulesets", "check_coverage"): checkCoverage = config.getboolean("rulesets", "check_coverage") checkTargetValidity = False if config.has_option("rulesets", "check_target_validity"): checkTargetValidity = config.getboolean( "rulesets", "check_target_validity") checkNonmatchGroups = False if config.has_option("rulesets", "check_nonmatch_groups"): checkNonmatchGroups = config.getboolean( "rulesets", "check_nonmatch_groups") checkTestFormatting = False if config.has_option("rulesets", "check_test_formatting"): checkTestFormatting = config.getboolean( "rulesets", "check_test_formatting") certdir = config.get("certificates", "basedir") if config.has_option("rulesets", "skiplist") and config.has_option("rulesets", "skipfield"): skiplist = config.get("rulesets", "skiplist") skipfield = config.get("rulesets", "skipfield") with open(skiplist) as f: f.readline() for line in f: splitLine = line.split(",") fileHash = splitLine[0] if splitLine[int(skipfield)] == "1": skipdict[binascii.unhexlify(fileHash)] = 1 threadCount = config.getint("http", "threads") httpEnabled = True if config.has_option("http", "enabled"): httpEnabled = config.getboolean("http", "enabled") metricName = config.get("thresholds", "metric") thresholdDistance = config.getfloat("thresholds", "max_distance") metricClass = getMetricClass(metricName) metric = metricClass() # Debugging options, graphviz dump dumpGraphvizTrie = False if config.has_option("debug", "dump_graphviz_trie"): dumpGraphvizTrie = config.getboolean("debug", "dump_graphviz_trie") if dumpGraphvizTrie: graphvizFile = config.get("debug", "graphviz_file") exitAfterDump = config.getboolean("debug", "exit_after_dump") if args.rule_files: xmlFnames = args.rule_files else: xmlFnames = glob.glob(os.path.join(ruledir, "*.xml")) trie = RuleTrie() rulesets = [] coverageProblemsExist = False targetValidityProblemExist = False nonmatchGroupProblemsExist = False testFormattingProblemsExist = False for xmlFname in xmlFnames: logging.debug("Parsing {}".format(xmlFname)) if skipFile(xmlFname): logging.debug( "Skipping rule file '{}', matches skiplist.".format(xmlFname)) continue ruleset = Ruleset(etree.parse(open(xmlFname, "rb")).getroot(), xmlFname) if ruleset.defaultOff and not includeDefaultOff: logging.debug("Skipping rule '{}', reason: {}".format( ruleset.name, ruleset.defaultOff)) continue # Check whether 
ruleset coverage by tests was sufficient. if checkCoverage: logging.debug("Checking coverage for '{}'.".format(ruleset.name)) problems = ruleset.getCoverageProblems() for problem in problems: coverageProblemsExist = True logging.error(problem) if checkTargetValidity: logging.debug("Checking target validity for '{}'.".format(ruleset.name)) problems = ruleset.getTargetValidityProblems() for problem in problems: targetValidityProblemExist = True logging.error(problem) if checkNonmatchGroups: logging.debug("Checking non-match groups for '{}'.".format(ruleset.name)) problems = ruleset.getNonmatchGroupProblems() for problem in problems: nonmatchGroupProblemsExist = True logging.error(problem) if checkTestFormatting: logging.debug("Checking test formatting for '{}'.".format(ruleset.name)) problems = ruleset.getTestFormattingProblems() for problem in problems: testFormattingProblemsExist = True logging.error(problem) trie.addRuleset(ruleset) rulesets.append(ruleset) # Trie is built now, dump it if it's set in config if dumpGraphvizTrie: logging.debug("Dumping graphviz ruleset trie") graph = trie.generateGraphizGraph() if graphvizFile == "-": graph.dot() else: with open(graphvizFile, "w") as gvFd: graph.dot(gvFd) if exitAfterDump: sys.exit(0) fetchOptions = http_client.FetchOptions(config) fetchers = list() # Ensure "default" is in the platform dirs if not os.path.isdir(os.path.join(certdir, "default")): raise RuntimeError( "Platform 'default' is missing from certificate directories") platforms = http_client.CertificatePlatforms( os.path.join(certdir, "default")) fetchers.append(http_client.HTTPFetcher( "default", platforms, fetchOptions, trie)) # fetches pages with unrewritten URLs fetcherPlain = http_client.HTTPFetcher("default", platforms, fetchOptions) urlList = [] if config.has_option("http", "url_list"): with open(config.get("http", "url_list")) as urlFile: urlList = [line.rstrip() for line in urlFile.readlines()] if httpEnabled: taskQueue = queue.Queue(1000) resQueue = queue.Queue() startTime = time.time() testedUrlPairCount = 0 config.getboolean("debug", "exit_after_dump") for i in range(threadCount): t = UrlComparisonThread( taskQueue, metric, thresholdDistance, autoDisable, resQueue) t.setDaemon(True) t.start() # set of main pages to test mainPages = set(urlList) # If list of URLs to test/scan was not defined, use the test URL extraction # methods built into the Ruleset implementation. if not urlList: for ruleset in rulesets: if ruleset.platform != "default" and os.path.isdir(os.path.join(certdir, ruleset.platform)): theseFetchers = copy.deepcopy(fetchers) platforms.addPlatform(ruleset.platform, os.path.join(certdir, ruleset.platform)) theseFetchers.append(http_client.HTTPFetcher( ruleset.platform, platforms, fetchOptions, trie)) else: theseFetchers = fetchers testUrls = [] for test in ruleset.tests: if not ruleset.excludes(test.url): testedUrlPairCount += 1 testUrls.append(test.url) else: # TODO: We should fetch the non-rewritten exclusion URLs to make # sure they still exist. logging.debug("Skipping excluded URL {}".format(test.url)) task = ComparisonTask(testUrls, fetcherPlain, theseFetchers, ruleset) taskQueue.put(task) taskQueue.join() logging.info("Finished in {:.2f} seconds. 
Loaded rulesets: {}, URL pairs: {}.".format( time.time() - startTime, len(xmlFnames), testedUrlPairCount)) if args.json_file: json_output(resQueue, args.json_file, problems) if checkCoverage: if coverageProblemsExist: return 1 # exit with error code if checkTargetValidity: if targetValidityProblemExist: return 1 # exit with error code if checkNonmatchGroups: if nonmatchGroupProblemsExist: return 1 # exit with error code if checkTestFormatting: if testFormattingProblemsExist: return 1 # exit with error code return 0 # exit with success
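# The cli() above repeats a has_option/getboolean dance for every optional
# flag; a hedged helper sketch (the name `getbool` is invented) that
# collapses the pattern into one call with a default.
from configparser import ConfigParser

def getbool(config, section, option, default=False):
    if config.has_option(section, option):
        return config.getboolean(section, option)
    return default

config = ConfigParser()
config.read_string("[rulesets]\nauto_disable = true\n")
autoDisable = getbool(config, "rulesets", "auto_disable")
checkCoverage = getbool(config, "rulesets", "check_coverage")  # falls back to False
print(autoDisable, checkCoverage)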
def run(ini_file='TOPKAPI.ini'): """Run the model with the set-up defined by `ini_file`. """ ##================================## ## Read the input file (*.ini) ## ##================================## config = SafeConfigParser() config.read(ini_file) print('Read the file ',ini_file) ##~~~~~~ Numerical_options ~~~~~~## solve_s = config.getfloat('numerical_options', 'solve_s') solve_o = config.getfloat('numerical_options', 'solve_o') solve_c = config.getfloat('numerical_options', 'solve_c') only_channel_output = config.getboolean('numerical_options', 'only_channel_output') ##~~~~~~~~~~~ input files ~~~~~~~~~~~## #Param file_global_param = config.get('input_files', 'file_global_param') file_cell_param = config.get('input_files', 'file_cell_param') #Rain file_rain = config.get('input_files', 'file_rain') #ETP file_ET = config.get('input_files', 'file_ET') #~~~~~~~~~~~ Group (simulated event) ~~~~~~~~~~~## group_name = config.get('groups', 'group_name') ##~~~~~~ Calibration ~~~~~~## fac_L = config.getfloat('calib_params', 'fac_L') fac_Ks = config.getfloat('calib_params', 'fac_Ks') fac_n_o = config.getfloat('calib_params', 'fac_n_o') fac_n_c = config.getfloat('calib_params', 'fac_n_c') ##~~~~~~ External flows ~~~~~~## external_flow = config.getboolean('external_flow', 'external_flow') if external_flow: file_Qexternal_flow = config.get('external_flow', 'file_Qexternal_flow') Xexternal_flow = config.getfloat('external_flow', 'Xexternal_flow') Yexternal_flow = config.getfloat('external_flow', 'Yexternal_flow') ##~~~~~~~~~~~ output files ~~~~~~~~~~## file_out = config.get('output_files', 'file_out') ut.check_file_exist(file_out) #create path_out if it doesn't exist if os.path.exists(file_out): first_run = False else: first_run = True append_output = config.getboolean('output_files', 'append_output') if append_output is True: fmode = 'a' else: fmode = 'w' ##============================## ## Read the forcing data ## ##============================## print('Read the forcing data') #~~~~Rainfall h5file_in = h5.open_file(file_rain,mode='r') group = '/'+group_name+'/' node = h5file_in.get_node(group+'rainfall') ndar_rain = node.read() h5file_in.close() #~~~~ETr - Reference crop ET h5file_in = h5.open_file(file_ET,mode='r') group = '/'+group_name+'/' node = h5file_in.get_node(group+'ETr') ndar_ETr = node.read() h5file_in.close() #~~~~ETo - Open water potential evap. 
h5file_in = h5.open_file(file_ET,mode='r') group = '/'+group_name+'/' node = h5file_in.get_node(group+'ETo') ndar_ETo = node.read() h5file_in.close() #~~~~external_flow flows if external_flow: ar_Qexternal_flow = np.loadtxt(file_Qexternal_flow)[:, 5] ##============================## ## Pretreatment of input data ## ##============================## print('Pretreatment of input data') #~~~~Read Global parameters file X, Dt, alpha_s, \ alpha_o, alpha_c, \ A_thres, W_min, W_max = pm.read_global_parameters(file_global_param) #~~~~Read Cell parameters file ar_cell_label, ar_coorx, \ ar_coory, ar_lambda, \ ar_Xc, ar_dam, \ ar_tan_beta, ar_tan_beta_channel, \ ar_L0, ar_Ks0, \ ar_theta_r, ar_theta_s, \ ar_n_o0, ar_n_c0, \ ar_cell_down, ar_pVs_t0, \ ar_Vo_t0, ar_Qc_t0, \ ar_kc, psi_b, lamda = pm.read_cell_parameters(file_cell_param) #~~~~Number of cell in the catchment nb_cell = len(ar_cell_label) #~~~~Computation of cell order ar_label_sort = pm.sort_cell(ar_cell_label, ar_cell_down) #~~~~Computation of upcells li_cell_up = pm.direct_up_cell(ar_cell_label, ar_cell_down, ar_label_sort) #~~~~Computation of drained area ar_A_drained = pm.drained_area(ar_label_sort, li_cell_up, X) #~~~~Apply calibration factors to the parameter values ar_L = ar_L0*fac_L ar_Ks = ar_Ks0*fac_Ks ar_n_o = ar_n_o0*fac_n_o ar_n_c = ar_n_c0*fac_n_c print('Max L=', max(ar_L)) print('Max Ks=', max(ar_Ks)) print('Max n_o=', max(ar_n_o)) print('Max n_c=', max(ar_n_c)) #~~~~Computation of model parameters from physical parameters ar_Vsm, ar_b_s, ar_b_o, \ ar_W, ar_b_c = pm.compute_cell_param(X, ar_Xc, Dt, alpha_s, alpha_o, alpha_c, nb_cell, A_thres, W_max, W_min, ar_lambda, ar_tan_beta, ar_tan_beta_channel, ar_L, ar_Ks, ar_theta_r, ar_theta_s, ar_n_o, ar_n_c, ar_A_drained) #~~~~Look for the cell of external_flow tunnel if external_flow: cell_external_flow = ut.find_cell_coordinates(ar_cell_label, Xexternal_flow, Yexternal_flow, ar_coorx, ar_coory, ar_lambda) print('external flows will be taken into account for cell no',\ cell_external_flow, ' coordinates ('\ ,Xexternal_flow,',',Yexternal_flow,')') #~~~~Number of simulation time steps nb_time_step = len(ndar_rain[:,0]) ##=============================## ## Variable array definition ## ##=============================## ## Initialisation of the reservoirs #Matrix of soil,overland and channel store at the begining of the time step if append_output and not first_run: print('Initialize from file') h5file_in = h5py.File(file_out) ar_Vs0 = h5file_in['/Soil/V_s'][-1, :] ar_Vc0 = h5file_in['/Channel/V_c'][-1, :] ar_Vo0 = h5file_in['/Overland/V_o'][-1, :] h5file_in.close() else: print('Initialize from parms') ar_Vs0 = fl.initial_volume_soil(ar_pVs_t0, ar_Vsm) ar_Vo0 = ar_Vo_t0 ar_Vc0 = fl.initial_volume_channel(ar_Qc_t0, ar_W, X, ar_n_c) ## Computed variables #Matrix of soil,overland and channel store at the end of the time step ar_Vs1 = np.ones(nb_cell)*-99.9 ar_Vo1 = np.ones(nb_cell)*-99.9 ar_Vc1 = np.ones(nb_cell)*-99.9 #Matrix of outflows between two time steps ar_Qs_out = np.ones(nb_cell)*-99.9 ar_Qo_out = np.ones(nb_cell)*-99.9 ar_Qc_out = np.zeros(nb_cell) ## Intermediate variables ar_a_s = np.ones(nb_cell)*-99.9 ar_a_o = np.ones(nb_cell)*-99.9 ar_a_c = np.ones(nb_cell)*-99.9 ar_Q_to_next_cell = np.ones(nb_cell)*-99.9 ar_Q_to_channel = np.ones(nb_cell)*-99.9 ar_Q_to_channel_sub = np.zeros(nb_cell) ar_Qc_cell_up = np.zeros(nb_cell) ar_ETa = np.zeros(nb_cell) ar_ET_channel = np.zeros(nb_cell) ##=============================## ## HDF5 output file definition ## 
##=============================## h5file = h5.open_file(file_out, mode=fmode, title='TOPKAPI_out') root = h5file.get_node('/') root._v_attrs.pytopkapi_version = pytopkapi.__version__ root._v_attrs.pytopkapi_git_revision = pytopkapi.__git_revision__ atom = h5.Float32Atom() h5filter = h5.Filters(9)# maximum compression # create file structure as necessary grp_name = '/Soil' if grp_name not in h5file: h5file.create_group('/', 'Soil', 'Soil arrays') if grp_name+'/Qs_out' not in h5file: array_Qs_out = h5file.create_earray(grp_name, 'Qs_out', atom, shape=(0,nb_cell), title='m3/s', filters=h5filter, expectedrows=nb_time_step) else: array_Qs_out = h5file.get_node(grp_name+'/Qs_out') if grp_name+'/V_s' not in h5file: array_Vs = h5file.create_earray(grp_name, 'V_s', atom, shape=(0, nb_cell), title='m3', filters=h5filter, expectedrows=nb_time_step+1) else: array_Vs = h5file.get_node(grp_name+'/V_s') grp_name = '/Overland' if grp_name not in h5file: h5file.create_group('/', 'Overland', 'Overland arrays') if grp_name+'/Qo_out' not in h5file: array_Qo_out = h5file.create_earray(grp_name, 'Qo_out', atom, shape=(0,nb_cell), title='m3/s', filters=h5filter, expectedrows=nb_time_step) else: array_Qo_out = h5file.get_node(grp_name+'/Qo_out') if grp_name+'/V_o' not in h5file: array_Vo = h5file.create_earray(grp_name, 'V_o', atom, shape=(0,nb_cell), title='m3', filters=h5filter, expectedrows=nb_time_step+1) else: array_Vo = h5file.get_node(grp_name+'/V_o') grp_name = '/Channel' if grp_name not in h5file: h5file.create_group('/', 'Channel', 'Channel arrays') if grp_name+'/Qc_out' not in h5file: array_Qc_out = h5file.create_earray(grp_name, 'Qc_out', atom, shape=(0,nb_cell), title='m3/s', filters=h5filter, expectedrows=nb_time_step) else: array_Qc_out = h5file.get_node(grp_name+'/Qc_out') if grp_name+'/V_c' not in h5file: array_Vc = h5file.create_earray(grp_name, 'V_c', atom, shape=(0,nb_cell), title='m3', filters=h5filter, expectedrows=nb_time_step) else: array_Vc = h5file.get_node(grp_name+'/V_c') if grp_name+'/Ec_out' not in h5file: array_Ec_out = h5file.create_earray(grp_name, 'Ec_out', atom, shape=(0,nb_cell), title='m3', filters=h5filter, expectedrows=nb_time_step) else: array_Ec_out = h5file.get_node(grp_name+'/Ec_out') if '/ET_out' not in h5file: array_ET_out = h5file.create_earray('/', 'ET_out', atom, shape=(0,nb_cell), title='mm', filters=h5filter, expectedrows=nb_time_step) else: array_ET_out = h5file.get_node('/ET_out') if '/Q_down' not in h5file: array_Q_down = h5file.create_earray('/', 'Q_down', atom, shape=(0,nb_cell), title='m3/s', filters=h5filter, expectedrows=nb_time_step) else: array_Q_down = h5file.get_node('/Q_down') if append_output is False or first_run is True: #Write the initial values into the output file array_Vs.append(ar_Vs0.reshape((1,nb_cell))) array_Vo.append(ar_Vo0.reshape((1,nb_cell))) array_Vc.append(ar_Vc0.reshape((1,nb_cell))) array_Qs_out.append(ar_Qs_out.reshape((1,nb_cell))) array_Qo_out.append(ar_Qo_out.reshape((1,nb_cell))) array_Qc_out.append(ar_Qc_out.reshape((1,nb_cell))) array_Q_down.append(ar_Q_to_next_cell.reshape((1,nb_cell))) array_ET_out.append(ar_ETa.reshape((1,nb_cell))) E_vol = ar_ET_channel*1e-3 * ar_W * ar_Xc array_Ec_out.append(E_vol.reshape((1,nb_cell))) eff_theta = ar_theta_s - ar_theta_r ##===========================## ## Core of the Model ## ##===========================## print('** NB_CELL=',nb_cell) print('** NB_TIME_STEP=',nb_time_step) print('--> SIMULATIONS <--') ## Loop on time for t in range(nb_time_step): print(t+1, '/', nb_time_step) eff_sat = 
ar_Vs0 / ar_Vsm
        # estimate soil suction head using Brooks and Corey (1964)
        psi = psi_b / np.power(eff_sat, 1.0 / lamda)

        ## Loop on cells
        n = -1
        for cell1 in ar_label_sort:
            cell = np.where(ar_cell_label == cell1)[0][0]
            n = n + 1

            ## ======================== ##
            ## ===== INTERCEPTION ===== ##
            ## ======================== ##
            ## No interception for the moment

            ## ======================== ##
            ## ===== INFILTRATION ===== ##
            ## ======================== ##
            rain_rate = ndar_rain[t, cell] / Dt
            infiltration_depth = green_ampt_cum_infiltration(
                rain_rate, psi[cell], eff_theta[cell], eff_sat[cell],
                ar_Ks[cell], Dt)

            ## ====================== ##
            ## ===== SOIL STORE ===== ##
            ## ====================== ##
            #~~~~ Computation of soil input
            ar_a_s[cell] = fl.input_soil(infiltration_depth, Dt, X,
                                         ar_Q_to_next_cell, li_cell_up[cell])
            #~~~~ Resolution of the equation dV/dt = a_s - b_s*V^alpha_s
            # Calculate the volume in the soil store at the end of the
            # current time-step.
            Vs_prim = om.solve_storage_eq(ar_a_s[cell], ar_b_s[cell], alpha_s,
                                          ar_Vs0[cell], Dt, solve_s)
            #~~~~ Computation of soil outflow and overland input
            ar_Qs_out[cell], ar_Vs1[cell] = fl.output_soil(
                ar_Vs0[cell], Vs_prim, ar_Vsm[cell], ar_a_s[cell],
                ar_b_s[cell], alpha_s, Dt)
            if ar_Qs_out[cell] < 0:
                # was a bare `stop` (a deliberate NameError); raise instead
                raise RuntimeError('Problem Soil: output greater than input '
                                   '(n=%d, label=%d)' % (n, cell))

            ## ========================== ##
            ## ===== OVERLAND STORE ===== ##
            ## ========================== ##
            #~~~~ Computation of overland input
            rain_excess = ndar_rain[t, cell] - infiltration_depth
            # convert mm to m^3/s
            rain_excess = max(0, (rain_excess*(10**-3)/Dt)*X**2)
            ar_a_o[cell] = max(0, ar_a_s[cell]
                                  - ((ar_Vs1[cell]-ar_Vs0[cell])/Dt
                                     + ar_Qs_out[cell])
                                  + rain_excess)
            #~~~~ Resolution of the equation dV/dt = a_o - b_o*V^alpha_o
            ar_Vo1[cell] = om.solve_storage_eq(ar_a_o[cell], ar_b_o[cell],
                                               alpha_o, ar_Vo0[cell], Dt,
                                               solve_o)
            #~~~~ Computation of overland outflows
            ar_Qo_out[cell] = fl.Qout_computing(ar_Vo0[cell], ar_Vo1[cell],
                                                ar_a_o[cell], Dt)
            if ar_Qo_out[cell] < 0:
                raise RuntimeError('Problem Overland: output greater than '
                                   'input (n=%d, label=%d)' % (n, cell))

            ## ============================= ##
            ## ===== FLOW PARTITIONING ===== ##
            ## ============================= ##
            # ar_Q_to_channel_sub doesn't get used for anything?
            ar_Q_to_next_cell[cell], \
            ar_Q_to_channel[cell], \
            ar_Q_to_channel_sub[cell] = fl.flow_partitioning(
                ar_lambda[cell], ar_Qs_out[cell], ar_Qo_out[cell],
                ar_W[cell], X, ar_Xc[cell])

            ## ======================== ##
            ## ===== CHANNEL STORE ==== ##
            ## ======================== ##
            if ar_lambda[cell] == 1:
                if ar_cell_down[cell] >= 0 \
                   and ar_lambda[ar_cell_down[cell]] == 0:
                    raise RuntimeError('Problem: the present cell has a '
                                       'channel but not the cell down...')

                #~~~~ Computation of channel input
                ar_a_c[cell], \
                ar_Qc_cell_up[cell] = fl.input_channel(ar_Qc_out,
                                                       ar_Q_to_channel[cell],
                                                       li_cell_up[cell])
                if external_flow \
                   and cell == np.where(ar_cell_label == cell_external_flow)[0][0]:
                    ar_a_c[cell] = ar_a_c[cell] + ar_Qexternal_flow[t]

                #~~~~ Resolution of the equation dV/dt = a_c - b_c*V^alpha_c
                ar_Vc1[cell] = om.solve_storage_eq(ar_a_c[cell], ar_b_c[cell],
                                                   alpha_c, ar_Vc0[cell], Dt,
                                                   solve_c)
                #~~~~ Computation of channel outflows
                ar_Qc_out[cell] = fl.Qout_computing(ar_Vc0[cell], ar_Vc1[cell],
                                                    ar_a_c[cell], Dt)
                if ar_Qc_out[cell] < 0:
                    raise RuntimeError('Problem Channel: output greater '
                                       'than input')
                if np.isnan(ar_Qc_out[cell]):
                    # was: str(...).count('N') > 0, a string-based NaN test
                    raise RuntimeError('Problem Channel: non-authorized '
                                       'operand (%s)' % ar_Qc_out[cell])
            else:
                ar_a_c[cell] = 0.
                ar_Vc1[cell] = 0.
                ar_Qc_out[cell] = 0.
## ============================== ## ## ===== EVAPOTRANSPIRATION ===== ## ## ============================== ## #~~~~~ From soil ar_ETa[cell], \ ar_Vs1[cell], \ ar_Vo1[cell] = em.evapot_soil_overland(ar_Vo1[cell], ar_Vs1[cell], ar_Vsm[cell], ar_kc[cell], ndar_ETr[t, cell], X) #~~~~~ Evaporation from channel if ar_lambda[cell] == 1: ar_ET_channel[cell], \ ar_Vc1[cell] = em.evapor_channel(ar_Vc1[cell], ndar_ETo[t, cell], ar_W[cell], ar_Xc[cell]) ####===================================#### #### Affectation of new vector values #### ####===================================#### ar_Vs0 = np.array(ar_Vs1) ar_Vo0 = np.array(ar_Vo1) ar_Vc0 = np.array(ar_Vc1) ####===================================#### #### Results writing at each time step #### ####===================================#### array_Vs.append(ar_Vs1.reshape((1,nb_cell))) array_Vo.append(ar_Vo1.reshape((1,nb_cell))) array_Vc.append(ar_Vc1.reshape((1,nb_cell))) array_Qs_out.append(ar_Qs_out.reshape((1,nb_cell))) array_Qo_out.append(ar_Qo_out.reshape((1,nb_cell))) array_Qc_out.append(ar_Qc_out.reshape((1,nb_cell))) array_Q_down.append(ar_Q_to_next_cell.reshape((1,nb_cell))) array_ET_out.append(ar_ETa.reshape((1,nb_cell))) E_vol = ar_ET_channel*1e-3 * ar_W * ar_Xc array_Ec_out.append(E_vol.reshape((1,nb_cell))) h5file.close() print(' ') print('***** THE END *****')
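# A minimal, hedged sketch of the create-or-get EArray pattern used for the
# output file above (PyTables 3.x names). 'demo.h5' and the 4-cell row shape
# are invented stand-ins.
import numpy as np
import tables as h5

h5file = h5.open_file('demo.h5', mode='a', title='TOPKAPI_out')
atom = h5.Float32Atom()
if '/Soil' not in h5file:
    h5file.create_group('/', 'Soil', 'Soil arrays')
if '/Soil/V_s' not in h5file:
    array_Vs = h5file.create_earray('/Soil', 'V_s', atom, shape=(0, 4),
                                    title='m3', filters=h5.Filters(9))
else:
    array_Vs = h5file.get_node('/Soil/V_s')
array_Vs.append(np.zeros((1, 4), dtype='float32'))  # one row per time step
h5file.close()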
class ConfigEd(object): """ConfigEd(filename) creates a full powered config editor for wesen""" def __init__(self, filename): self.configfile = filename self.configParser = SafeConfigParser() self.alwaysDefaults = False def printConfig(self): """prints the configfile to screen""" print(("%s:" % self.configfile)) for line in open(self.configfile).readlines(): print(line[:-1]) # -1 for \n removal print(".") def getConfig(self): """getConfig() returns the config dict. if a section, option or value is not found, the default values will be returned. """ if(os.path.exists(self.configfile)): self.configParser.read(self.configfile) result = {} for entry in CONFIG_OPTIONS: (section, options) = entry result[section] = {} if(self.configParser.has_section(section)): for option in options: (key, entryType) = option result[section][key] = \ self.getEntryFromConfigParser( section, key, entryType) return result else: self.writeDefaults() return self.getConfig() def getEntryFromConfigParser(self, section, key, entryType): """depending on entryType, calls the appropriate getter from self.configParser""" value = None if(entryType == str): value = self.configParser.get(section, key) elif(entryType == int): value = self.configParser.getint(section, key) elif(entryType == bool): value = self.configParser.getboolean(section, key) elif(entryType == float): value = self.configParser.getfloat(section, key) if(value is None): value = CONFIG_DEFAULTS[section][key] return value def writeDefaults(self): """write config defaults to file.""" self.alwaysDefaults = True self.edit() def edit(self): """Interactive config-file editing; It will ask the user every single option possible, always showing the default values and sometimes a comment, making it easier to understand the configfile for newbies. """ if(self.alwaysDefaults): write = True elif(os.path.exists(self.configfile)): write = (input(STRING_ERROR_FILEEXISTS % self.configfile) == "y") else: write = True if(write): self.configParser.read(self.configfile) for entry in CONFIG_OPTIONS: (section, options) = entry if(not self.configParser.has_section(section)): self.configParser.add_section(section) if(not self.alwaysDefaults): print("[%s]" % (section)) for option in options: key = option[0] self.setDefInputStandard(section, key) self.configParser.write(open(self.configfile, "w")) print((STRING_MESSAGE_WROTE % self.configfile)) else: print((STRING_ERROR_NOTWROTE % self.configfile)) def setDefInputStandard(self, section, key): """fetches explanation from .strings and default value from .defaults""" # TODO why upper? we should have lower-case here. explanationString = STRING_CONFIGED[section.upper()][key.upper()] self.configParser.set(section, key, str(self.def_input(CONFIG_DEFAULTS[section][key], explanationString))) def def_input(self, default, msg): """derived from raw_input, def_input(default,prompt) returns a user input or, if blank, the specified default. """ if(not self.alwaysDefaults): try: result = input("# default: %s\t%s" % (default, msg)) except EOFError: self.alwaysDefaults = True return default print("") if(not result): return default else: return result else: return default
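# Hedged sketch of the type dispatch in getEntryFromConfigParser() above:
# a table mapping a Python type to the matching ConfigParser getter replaces
# the if/elif chain. The section/key names and CONFIG-style data here are
# invented stand-ins.
from configparser import ConfigParser

GETTERS = {str: 'get', int: 'getint', bool: 'getboolean', float: 'getfloat'}

def get_typed(parser, section, key, entry_type):
    return getattr(parser, GETTERS[entry_type])(section, key)

parser = ConfigParser()
parser.read_string("[wesen]\nrounds = 10\nverbose = yes\n")
print(get_typed(parser, 'wesen', 'rounds', int))    # -> 10
print(get_typed(parser, 'wesen', 'verbose', bool))  # -> True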
class GlobalSettings(GObject.Object):
    """
    Pitivi app settings.

    The settings object loads settings from different sources, currently:
    - the local configuration file,
    - environment variables.

    Modules declare which settings they wish to access by calling the
    addConfigOption() class method during initialization.

    @cvar options: A dictionary of available settings.
    @cvar environment: A list of the controlled environment variables.
    """

    options = {}
    environment = set()
    defaults = {}
    __gsignals__ = {}

    def __init__(self, **unused_kwargs):
        GObject.Object.__init__(self)
        self._config = SafeConfigParser()
        self._readSettingsFromConfigurationFile()
        self._readSettingsFromEnvironmentVariables()

    def _readSettingsFromConfigurationFile(self):
        """
        Read the configuration from the user configuration file.
        """
        try:
            conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf")
            self._config.read(conf_file_path)
        except UnicodeDecodeError:
            unicode_error_dialog()
            return
        except ParsingError:
            return

        for (section, attrname, typ, key, env, value) in self.iterAllOptions():
            if not self._config.has_section(section):
                continue
            if key and self._config.has_option(section, key):
                if typ == int:
                    try:
                        value = self._config.getint(section, key)
                    except ValueError:
                        # In previous configurations we incorrectly stored
                        # ints using float values.
                        value = int(self._config.getfloat(section, key))
                elif typ == float:
                    value = self._config.getfloat(section, key)
                elif typ == bool:
                    value = self._config.getboolean(section, key)
                else:
                    value = self._config.get(section, key)
                setattr(self, attrname, value)

    @classmethod
    def readSettingSectionFromFile(cls, section):
        """
        Force reading a particular section of the settings file.

        Use this if you dynamically determine settings sections/keys at
        runtime (like in tabsmanager.py). Otherwise, the settings file would
        be read only once (at the initialization phase of your module) and
        your config sections would never be read, and thus values would be
        reset to defaults on every startup because GlobalSettings would think
        they don't exist.
        """
        if cls._config.has_section(section):
            for option in cls._config.options(section):
                # We don't know the value type in advance, just try them all.
                try:
                    value = cls._config.getfloat(section, option)
                except ValueError:
                    try:
                        value = cls._config.getint(section, option)
                    except ValueError:
                        try:
                            value = cls._config.getboolean(section, option)
                        except ValueError:
                            value = cls._config.get(section, option)
                setattr(cls, section + option, value)

    def _readSettingsFromEnvironmentVariables(self):
        """
        Override options values using their registered environment variables.
        """
        for section, attrname, typ, key, env, value in self.iterAllOptions():
            if not env:
                # This option does not have an environment variable name.
                continue
            var = get_env_by_type(typ, env)
            if var is not None:
                setattr(self, attrname, var)

    def _writeSettingsToConfigurationFile(self):
        conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf")
        for (section, attrname, typ, key, env_var, value) in self.iterAllOptions():
            if not self._config.has_section(section):
                self._config.add_section(section)
            if key:
                if value is not None:
                    self._config.set(section, key, str(value))
                else:
                    self._config.remove_option(section, key)
        try:
            file = open(conf_file_path, 'w')
        except IOError:
            return
        self._config.write(file)
        file.close()

    def storeSettings(self):
        """
        Write settings to the user's local configuration file. Note that only
        those settings which were added with a section and a key value are
        stored.
        """
        self._writeSettingsToConfigurationFile()

    def iterAllOptions(self):
        """
        Iterate over all registered options.

        @return: an iterator which yields a tuple of (section, attrname, type,
        key, environment, value) for each option.
        """
        for section, options in list(self.options.items()):
            for attrname, (typ, key, environment) in list(options.items()):
                yield section, attrname, typ, key, environment, \
                    getattr(self, attrname)

    def isDefault(self, attrname):
        return getattr(self, attrname) == self.defaults[attrname]

    def setDefault(self, attrname):
        setattr(self, attrname, self.defaults[attrname])

    @classmethod
    def addConfigOption(cls, attrname, type_=None, section=None, key=None,
                        environment=None, default=None, notify=False):
        """
        Add a configuration option.

        This function should be called during module initialization, before
        the config file is actually read. By default, only options registered
        beforehand will be loaded.
        See mainwindow.py and medialibrary.py for examples of usage.

        If you want to add configuration options after initialization, use the
        readSettingSectionFromFile method to force reading later on.
        See tabsmanager.py for an example of such a scenario.

        @param attrname: the attribute of this class which represents the option
        @type attrname: C{str}
        @param type_: type of the attribute. Unnecessary if default is given.
        @type type_: a builtin or class
        @param section: The section of the config file under which this option
        is saved. This section must have been added with addConfigSection().
        Not necessary if key is not given.
        @param key: the key under which this option is to be saved. Can be
        None if this option should not be saved.
        @type key: C{str}
        @param notify: whether or not this attribute should emit notification
        signals when modified (default is False).
        @type notify: C{boolean}
        """
        if section and section not in cls.options:
            raise ConfigError(
                "You must add the section \"%s\" first." % section)
        if key and not section:
            raise ConfigError(
                "You must specify a section for key \"%s\"" % key)
        if section and key in cls.options[section]:
            raise ConfigError("Option \"%s\" is already in use." % key)
        if hasattr(cls, attrname):
            raise ConfigError(
                "Settings attribute \"%s\" is already in use." % attrname)
        if environment and environment in cls.environment:
            raise ConfigError("Settings environment variable \"%s\" is "
                              "already in use." % environment)
        if not type_ and default is None:
            raise ConfigError("Settings attribute \"%s\" must have a type "
                              "or a default." % attrname)
        if not type_:
            type_ = type(default)
        if notify:
            notification = Notification(attrname)
            setattr(cls, attrname, notification)
            setattr(cls, "_" + attrname, default)
            GObject.signal_new(notification.signame, cls,
                               GObject.SIGNAL_RUN_LAST, None, ())
        else:
            setattr(cls, attrname, default)
        if section and key:
            cls.options[section][attrname] = type_, key, environment
        cls.environment.add(environment)
        cls.defaults[attrname] = default

    @classmethod
    def addConfigSection(cls, section):
        """
        Add a section to the local config file.

        @param section: The section name. This section must not already exist.
        @type section: C{str}
        """
        if section in cls.options:
            raise ConfigError("Duplicate Section \"%s\"." % section)
        cls.options[section] = {}

    @classmethod
    def notifiesConfigOption(cls, attrname):
        signal_name = Notification.signalName(attrname)
        GObject.signal_lookup(signal_name, cls)
class ZFind:

    def __init__(self, num_z=5, inifile=None, dest=None, clobber=True):
        self.num_z = num_z
        self.inifile = inifile
        self.dest = dest
        self.clobber = clobber
        if self.inifile:
            self.set_templates_from_inifile()

    def set_templates_from_inifile(self):
        self.labels = []
        self.templates = []
        self.zmin = []
        self.zmax = []
        self.npoly = []
        self.npixstep = []

        if exists(self.inifile):
            self.option = SafeConfigParser()
            self.option.optionxform = str
            r = self.option.read(self.inifile)
            if len(r) == 1:
                for section in self.option.sections():
                    self.labels.append(section)
                    if self.option.has_option(section, 'template'):
                        self.templates.append(
                            self.option.get(section, 'template'))
                    if self.option.has_option(section, 'zmin'):
                        self.zmin.append(
                            self.option.getfloat(section, 'zmin'))
                    if self.option.has_option(section, 'zmax'):
                        self.zmax.append(
                            self.option.getfloat(section, 'zmax'))
                    if self.option.has_option(section, 'npoly'):
                        self.npoly.append(
                            self.option.getint(section, 'npoly'))
                    if self.option.has_option(section, 'npixstep'):
                        self.npixstep.append(
                            self.option.getint(section, 'npixstep'))
            else:
                print("Cannot parse ini file %r" % self.inifile)

            if not self.labels:
                self.labels = None
            if not self.templates:
                self.templates = None
            if not self.zmin:
                self.zmin = None
            if not self.zmax:
                self.zmax = None
            if not self.npoly:
                self.npoly = None
            if not self.npixstep:
                self.npixstep = None

            self.set_templates()
        else:
            print("WARNING: %r does not exist" % self.inifile)

    def set_templates(self, templates=None, zmin=None, zmax=None, npoly=None,
                      npixstep=None):
        if templates:
            self.templates = templates
        if zmin:
            self.zmin = zmin
        if zmax:
            self.zmax = zmax
        if npoly:
            self.npoly = npoly
        if npixstep:
            self.npixstep = npixstep

        if type(self.templates) is str:
            try:
                self.templates = [self.templates]
            except Exception:
                print('Templates not a list and unable to convert to list!')
                sys.exit(1)
        if type(self.templates) is list:
            self.templates = list(map(str, self.templates))
        if self.zmin is not None:
            if type(self.zmin) is not list:
                try:
                    self.zmin = [self.zmin]
                except Exception:
                    try:
                        self.zmin = self.zmin.tolist()
                    except Exception:
                        print("Can't convert zmin to list - "
                              "defaulting to full zrange!")
                        self.zmin = None
                        self.zmax = None
            if type(self.zmin) is list:
                if len(self.zmin) != len(self.templates):
                    print("Length of zmin doesn't match length of templates "
                          "- defaulting to full zrange!")
                    self.zmin = None
                    self.zmax = None
                if self.zmax is None:
                    print('zmax not given - defaulting to full zrange!')
                    self.zmin = None
                    self.zmax = None
                else:
                    if type(self.zmax) is not list:
                        try:
                            self.zmax = [self.zmax]
                        except Exception:
                            try:
                                self.zmax = self.zmax.tolist()
                            except Exception:
                                print("Can't convert zmax to list - "
                                      "defaulting to full zrange!")
                                self.zmin = None
                                self.zmax = None
                    # Guard added: zmin may have been reset to None above.
                    if (self.zmin is not None and self.zmax is not None and
                            len(self.zmin) != len(self.zmax)):
                        print("Length of zmin and zmax don't match - "
                              "defaulting to full zrange!")
                        self.zmin = None
                        self.zmax = None
        if self.npoly is None:
            self.npoly = [4] * len(self.templates)
        else:
            if type(self.npoly) is not list:
                try:
                    self.npoly = [self.npoly]
                except Exception:
                    try:
                        self.npoly = self.npoly.tolist()
                    except Exception:
                        print('npoly not a list and unable to convert to '
                              'list - defaulting to npoly=4 for all '
                              'templates!')
                        self.npoly = [4] * len(self.templates)
            else:
                self.npoly = list(map(int, self.npoly))
        if self.npixstep is None:
            self.npixstep = [1] * len(self.templates)
        else:
            if type(self.npixstep) is not list:
                try:
                    self.npixstep = [self.npixstep]
                except Exception:
                    try:
                        self.npixstep = self.npixstep.tolist()
                    except Exception:
                        print('npixstep not a list and unable to convert to '
                              'list - defaulting to npixstep=1 for all '
                              'templates!')
                        self.npixstep = [1] * len(self.templates)
            else:
                self.npixstep = list(map(int, self.npixstep))

    def reduce_plate_mjd(self, plate, mjd, fiberid=None, chi2file=False):
        self.chi2file = chi2file
        # Check types and try to convert to proper types if necessary
        if fiberid is None:
            fiberid = [i for i in range(1000)]
        else:
            if type(fiberid) is not list:
                try:
                    fiberid = [fiberid]
                    fiberid = list(map(int, fiberid))
                except Exception:
                    try:
                        fiberid = fiberid.tolist()
                        fiberid = list(map(int, fiberid))
                    except Exception:
                        print('fiberid not set properly - running full plate!')
                        fiberid = [i for i in range(1000)]
            else:
                fiberid = list(map(int, fiberid))

        # Spec
        specs = spec.Spec(plate=plate, mjd=mjd, fiberid=fiberid)

        # ZFinder, ZFitter
        zfindobjs = []
        zfitobjs = []
        if (self.zmin is not None) and (self.zmax is not None):
            for i in range(len(self.templates)):
                zfindobjs.append(zfinder.ZFinder(fname=self.templates[i],
                                                 npoly=self.npoly[i],
                                                 zmin=self.zmin[i],
                                                 zmax=self.zmax[i]))
                zfindobjs[i].zchi2(specs.flux, specs.loglambda, specs.ivar,
                                   npixstep=self.npixstep[i], plate=plate,
                                   mjd=mjd, fiberid=fiberid[0],
                                   chi2file=self.chi2file)
                zfitobjs.append(zfitter.ZFitter(zfindobjs[i].zchi2arr,
                                                zfindobjs[i].zbase))
                zfitobjs[i].z_refine()
        else:
            for i in range(len(self.templates)):
                zfindobjs.append(zfinder.ZFinder(fname=self.templates[i],
                                                 npoly=self.npoly[i],
                                                 npixstep=self.npixstep[i]))
                zfindobjs[i].zchi2(specs.flux, specs.loglambda, specs.ivar,
                                   npixstep=self.npixstep[i], plate=plate,
                                   mjd=mjd, fiberid=fiberid[0],
                                   chi2file=self.chi2file)
                zfitobjs.append(zfitter.ZFitter(zfindobjs[i].zchi2arr,
                                                zfindobjs[i].zbase))
                zfitobjs[i].z_refine()

        # Flags
        flags = []
        for i in range(len(zfindobjs)):
            flags.append(misc.comb_flags(specs, zfindobjs[i], zfitobjs[i]))

        # ZPicker
        if len(self.templates) == 1:
            zpick = zpicker.ZPicker(specs, zfindobjs[0], zfitobjs[0], flags[0])
        elif len(self.templates) == 2:
            zpick = zpicker.ZPicker(specs, zfindobjs[0], zfitobjs[0], flags[0],
                                    zfindobjs[1], zfitobjs[1], flags[1])
        elif len(self.templates) == 3:
            zpick = zpicker.ZPicker(specs, zfindobjs[0], zfitobjs[0], flags[0],
                                    zfindobjs[1], zfitobjs[1], flags[1],
                                    zfindobjs[2], zfitobjs[2], flags[2])
        elif len(self.templates) == 4:
            zpick = zpicker.ZPicker(specs, zfindobjs[0], zfitobjs[0], flags[0],
                                    zfindobjs[1], zfitobjs[1], flags[1],
                                    zfindobjs[2], zfitobjs[2], flags[2],
                                    zfindobjs[3], zfitobjs[3], flags[3])
        elif len(self.templates) == 5:
            zpick = zpicker.ZPicker(specs, zfindobjs[0], zfitobjs[0], flags[0],
                                    zfindobjs[1], zfitobjs[1], flags[1],
                                    zfindobjs[2], zfitobjs[2], flags[2],
                                    zfindobjs[3], zfitobjs[3], flags[3],
                                    zfindobjs[4], zfitobjs[4], flags[4])

        output = None

        # Write output
        if self.dest is None:
            output = io.WriteRedmonster(zpick, clobber=self.clobber)
        else:
            if type(self.dest) is str:
                output = io.WriteRedmonster(zpick, dest=self.dest,
                                            clobber=self.clobber)
            else:
                try:
                    self.dest = str(self.dest)
                    output = io.WriteRedmonster(zpick, dest=self.dest,
                                                clobber=self.clobber)
                except Exception:
                    print('Could not convert dest to string - writing to '
                          'default directory and NOT clobbering old files!')
                    output = io.WriteRedmonster(zpick, clobber=True)

        if output:
            if len(zpick.fiberid) == 1:
                output.write_fiberid()
            else:
                output.write_plate()
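# For illustration, a minimal sketch of driving ZFind from an ini file.
# Only the option keys (template, zmin, zmax, npoly, npixstep) come from
# set_templates_from_inifile() above; the section name, file name, template
# path, and plate/mjd values are invented placeholders, and actually running
# reduce_plate_mjd() would additionally require the spec/zfinder data
# environment to be set up.
SAMPLE_TEMPLATE_INI = """
[GALAXY]
template = /path/to/ndArch-galaxy-template.fits
zmin = -0.01
zmax = 1.2
npoly = 4
npixstep = 1
"""

with open('templates.ini', 'w') as f:
    f.write(SAMPLE_TEMPLATE_INI)

zf = ZFind(inifile='templates.ini')  # parses the file on construction
zf.reduce_plate_mjd(plate=1234, mjd=55555)  # hypothetical plate/mjd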
key=bytes("alert", encoding='utf-8'), value=bytes(message, encoding='utf-8')) producer.send(device, key=bytes("alert", encoding='utf-8'), value=bytes(message, encoding='utf-8')) elif event.action == oldAction and event.action == 'held' and oldDirection != event.direction: oldDirection = event.direction producer.send('alerts', key=bytes("alert", encoding='utf-8'), value=bytes(message, encoding='utf-8')) oldAction = event.action sense.stick.direction_any = mid while True: readings = {} readings['orentation'] = sense.get_orientation() readings['compass'] = sense.get_compass_raw() readings['gyroscope'] = sense.gyro_raw readings['accelerometer'] = sense.accel_raw x = x + 1 message = json.dumps(readings) producer.send(device, key=bytes("message", encoding='utf-8'), value=bytes(message, encoding='utf-8')) time.sleep(parser.getfloat('device', 'sendSleep'))
# Init
config = SafeConfigParser()
config.read('config.ini')

data_dir = config.get('Test', 'data_dir')
data_file_name = config.get('Test', 'data_file_name')
freq_file_name = config.get('Test', 'freq_file_name')
power_file_name = config.get('Test', 'power_file_name')

sig_gen_ip = config.get('IP', 'sig_gen_ip')
sig_gen_clk_ip = config.get('IP', 'sig_gen_clk_ip')

afc_idn = config.get('EPICS_IDN', 'afc_epics_idn')
sig_ana_idn = config.get('EPICS_IDN', 'sig_ana_epics_idn')

num_samples = config.getint('Test', 'num_samples')
fs = config.getfloat('Test', 'fs')
adc_resolution_bits = config.getint('Test', 'adc_resolution_bits')
sig_gen_level = config.get('Test', 'sig_gen_level')
sig_gen_clk_level = config.get('Test', 'sig_gen_clk_level')
bpm_channel = config.get('Test', 'bpm_channel')

power_array = []  # initializing array that receives power of each freq

sig_gen, sig_gen_clk, bpm = init_instruments(
    sig_gen_config=[sig_gen_ip, 'visa'],
    sig_gen_clk_config=[sig_gen_clk_ip, 'visa'],
    bpm_config=[afc_idn, 'epics'])

create_data_dir(data_dir)

sys.stdout.write("\nRunning test...\n\n")
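# For illustration, a config.ini that satisfies the reads above. The section
# and key names are taken directly from the get/getint/getfloat calls; every
# value is an invented placeholder, not a real instrument address or setting.
SAMPLE_CONFIG_INI = """
[Test]
data_dir = ./bpm_test_data
data_file_name = data.txt
freq_file_name = freqs.txt
power_file_name = power.txt
num_samples = 10000
fs = 100e6
adc_resolution_bits = 16
sig_gen_level = -10 dBm
sig_gen_clk_level = 0 dBm
bpm_channel = adc0

[IP]
sig_gen_ip = 10.0.0.1
sig_gen_clk_ip = 10.0.0.2

[EPICS_IDN]
afc_epics_idn = AFC:01
sig_ana_epics_idn = SIGANA:01
"""

with open('config.ini', 'w') as f:
    f.write(SAMPLE_CONFIG_INI)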