class DeterministicRunner(DynamicModel):

    def __init__(self, configuration, modelTime, initialState=None):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        self.model = PCRGlobWB(configuration, modelTime, initialState)
        self.reporting = Reporting(configuration, self.model, modelTime)

    def initial(self):
        pass

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # update model (will pick up current model time from model time object)
        self.model.read_forcings()
        self.model.update(report_water_balance=True)

        # do any needed reporting for this time step
        self.reporting.report()
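A minimal driver sketch for a runner like this, assuming pcraster's DynamicFramework API and the Configuration/ModelTime helpers from the same code base (the ini file name and the nrOfTimeSteps attribute are illustrative assumptions):

from pcraster.framework import DynamicFramework

configuration = Configuration("setup.ini")            # hypothetical ini file name
modelTime = ModelTime()                               # assumed helper from this code base
deterministic_runner = DeterministicRunner(configuration, modelTime)
dynamic_framework = DynamicFramework(deterministic_runner, modelTime.nrOfTimeSteps)  # step count is an assumption
dynamic_framework.setQuiet(True)                      # suppress per-step framework output
dynamic_framework.run()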
class TestClassReport:

    def setup(self):
        l = 'abc'
        self.report = Reporting()
        self.empty_report = Reporting()
        for i in range(1, 4):
            user = User(100 * i)
            self.report.user_objects["alice" + l[i - 1]] = user
            user.create("alice" + l[i - 1], "*****@*****.**")
            merchant = Merchant(1.25 + float(i))
            self.report.merchant_objects["jack" + l[i - 1]] = merchant
            merchant.create("jack" + l[i - 1], "*****@*****.**")
            # note: "withdrawl" is the spelling this code base's Transact API expects
            transact = Transact(user, merchant, 100, "withdrawl")
            self.report.transact_objects[i] = transact
            transact.check_transact()

    def test_total_discount_merchant(self):
        assert self.report.total_discount_merchant("jacka") == 2.25
        assert self.report.total_discount_merchant("jackb") == 3.25
        assert self.report.total_discount_merchant("jackc") == 4.25

    def test_dues_user(self):
        assert self.report.dues_user() == 300.0

    def test_users_at_credit_limit(self):
        assert self.report.users_at_credit_limit() == "alicea"

    def test_dues_user_empty(self):
        self.empty_report.user_objects = []
        assert self.empty_report.users_at_credit_limit() is None
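The fixture pairs merchant rates of 2.25 to 4.25 with transaction amounts of 100, and the tests expect discounts of 2.25 to 4.25, which reads as rate percent of the amount. A sketch of total_discount_merchant under that assumption (the amount, merchant, and name attributes are guesses, not the project's actual fields):

def total_discount_merchant(self, merchant_name):
    # assumed semantics: sum rate% of each transaction amount for this merchant
    return sum(t.amount * t.merchant.rate / 100.0
               for t in self.transact_objects.values()
               if t.merchant.name == merchant_name)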
class Pipeline(object):

    def __init__(self):
        self.retrieval = Retrieval()
        self.reporting = Reporting()

    def run(self):
        self.retrieval.run()
        self.reporting.run()
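A minimal entry-point sketch (the __main__ guard is an assumption, not shown in the original):

if __name__ == "__main__":
    Pipeline().run()  # retrieval first, then reporting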
class DeterministicRunner(DynamicModel):

    def __init__(self, configuration, modelTime, initialState=None):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        self.model = PCRGlobWB(configuration, modelTime, initialState)
        self.reporting = Reporting(configuration, self.model, modelTime)

    def initial(self):
        pass

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        self.model.read_forcings()
        self.model.update(report_water_balance=True)

        # get observation data
        satellite_satDegUpp000005 = self.get_satDegUpp000005_from_observation()

        # set the upper soil moisture state based on observation data
        self.set_satDegUpp000005(satellite_satDegUpp000005)

        # do any needed reporting for this time step
        self.reporting.report()

    def get_satDegUpp000005_from_observation(self):
        # assumptions for the observation values:
        # - this should be replaced by values from the ECV soil moisture product (satellite data)
        # - uncertainty should be included here
        # - note that the value should be between 0.0 and 1.0
        observed_satDegUpp000005 = pcr.min(1.0,
                                   pcr.max(0.0,
                                   pcr.normal(pcr.boolean(1)) + 1.0))
        return observed_satDegUpp000005

    def set_satDegUpp000005(self, observed_satDegUpp000005):
        # ratio between observation and model
        ratio_between_observation_and_model = pcr.ifthenelse(
            self.model.landSurface.satDegUpp000005 > 0.0,
            observed_satDegUpp000005 / self.model.landSurface.satDegUpp000005,
            0.0)

        # updating upper soil states for all land cover types
        for coverType in self.model.landSurface.coverTypes:

            # correcting upper soil state (storUpp000005)
            self.model.landSurface.landCoverObj[coverType].storUpp000005 *= ratio_between_observation_and_model

            # if the model value is 0.0, storUpp000005 is calculated based on
            # the storage capacity (model parameter) and the observed saturation degree
            self.model.landSurface.landCoverObj[coverType].storUpp000005 = pcr.ifthenelse(
                self.model.landSurface.satDegUpp000005 > 0.0,
                self.model.landSurface.landCoverObj[coverType].storUpp000005,
                observed_satDegUpp000005 * self.model.landSurface.parameters.storCapUpp000005)
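The assimilation step above scales each land cover's upper-soil storage by the observed/modelled saturation ratio and falls back to capacity times observation where the model is dry. A plain-NumPy sketch of the same arithmetic (all names here are illustrative, not from the code base):

import numpy as np

def rescale_storage(stor, sat_model, sat_obs, stor_cap):
    # scale storage by the observed/modelled saturation ratio;
    # where the model is dry (sat_model == 0), use capacity * observation instead
    safe_model = np.where(sat_model > 0.0, sat_model, 1.0)  # avoid division by zero
    corrected = stor * np.where(sat_model > 0.0, sat_obs / safe_model, 0.0)
    return np.where(sat_model > 0.0, corrected, sat_obs * stor_cap)

# e.g. 0.02 m storage at 0.4 model saturation, 0.5 observed -> 0.025 m
print(rescale_storage(np.array([0.02]), np.array([0.4]), np.array([0.5]), np.array([0.05])))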
def instantiate_testrail(request):
    if request.config.getoption("--skip-testrail"):
        tr_client = Reporting()
    else:
        tr_client = APIClient(request.config.getini("tr_url"),
                              request.config.getini("tr_user"),
                              request.config.getini("tr_pass"),
                              request.config.getini("tr_project_id"))
    yield tr_client
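The fixture reads one command-line flag and four ini keys, which have to be registered in a conftest.py. A sketch using pytest's standard hooks (the help texts are assumptions):

# conftest.py (assumed, not shown in the original)
def pytest_addoption(parser):
    parser.addoption("--skip-testrail", action="store_true",
                     help="use the no-op Reporting client instead of TestRail")
    for key in ("tr_url", "tr_user", "tr_pass", "tr_project_id"):
        parser.addini(key, help="TestRail connection setting")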
def __init__(self, data_loader, config):
    """Initialize configurations."""

    # Data loader.
    self.data_loader = data_loader

    # Model configurations.
    self.image_size = config.image_size
    self.encoder_mode = config.encoder_mode
    self.encoder_last = config.encoder_last
    self.encoder_start_ch = config.encoder_start_ch
    self.encoder_target_ch = config.encoder_target_ch
    self.image_layer = config.image_layer

    # Training configurations.
    self.dataset = config.dataset
    self.batch_size = config.batch_size
    self.num_iters = config.num_iters
    self.num_iters_decay = config.num_iters_decay
    self.e_lr = config.e_lr
    self.beta1 = config.beta1
    self.beta2 = config.beta2
    self.resume_iters = config.resume_iters

    # Test configurations.
    self.test_iters = config.test_iters

    # Miscellaneous.
    self.use_tensorboard = config.use_tensorboard
    # self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    self.device = torch.device('cuda:' + config.gpu)
    self.email_address = config.email_address
    self.image_sending = config.image_sending

    # Directories.
    self.train_dir = config.train_dir
    self.log_dir = config.log_dir
    self.sample_dir = config.sample_dir
    self.model_save_dir = config.model_save_dir
    self.result_dir = config.result_dir

    # Step sizes.
    self.log_step = config.log_step
    self.sample_step = config.sample_step
    self.model_save_step = config.model_save_step
    self.model_save_start = config.model_save_start
    self.lr_update_step = config.lr_update_step

    # Reporting.
    self.reporting = Reporting(self.email_address, self.image_sending,
                               os.path.join(self.train_dir, self.sample_dir))

    # Build the model and tensorboard.
    self.build_model()
    if self.use_tensorboard:
        self.build_tensorboard()
class DeterministicRunner(DynamicModel):

    def __init__(self, configuration, modelTime):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        self.model = ModflowOfflineCoupling(configuration, modelTime)
        self.reporting = Reporting(configuration, self.model, modelTime)

    def initial(self):
        # get or prepare the initial condition for groundwater head
        self.model.get_initial_heads()

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # update the model (it will pick up the current model time from the modelTime object)
        self.model.update()

        # do any needed reporting for this time step
        self.reporting.report()
def setup(self):
    self.report = Reporting()
    self.class_map = {
        "user": {
            "class_name": User,
            "report_object": self.report.user_objects
        },
        "merchant": {
            "class_name": Merchant,
            "report_object": self.report.merchant_objects
        },
        "txn": {
            "class_name": Transact,
            "report_object": self.report.transact_objects
        },
        "report": {
            "class_name": Reporting,
            "report_object": self.report
        }
    }
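A class_map like this usually backs a keyword dispatcher. A hypothetical sketch of how a command handler might consume it (handle and its return shape are illustrative, not from the original):

def handle(self, keyword, *ctor_args):
    entry = self.class_map[keyword]           # KeyError -> unknown command keyword
    obj = entry["class_name"](*ctor_args)     # e.g. build a User or Merchant
    return entry["report_object"], obj        # caller registers obj in that store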
def main():
    print("== DevSecOps: ignore ==\nScan github repositories for misconfigured ignore files.\n")
    args = Arguments()
    if not args.load(sys.argv[1:]):
        exit(0)
    if args.help:
        args.print_help()
        exit(0)
    github = Github(args.github_token)
    crawler = GithubCrawler(github, args.organization)
    results = ScanResults()
    try:
        crawler.scan(results)
    except KeyboardInterrupt:
        print("\n\n*****************************\n[W] User aborted with CTRL-C.\n*****************************\n")
    Reporting(verbose=args.verbose).print(results)
    GitIssueCreator(github, verbose=args.verbose, create_issue=args.create_issue,
                    create_pr=args.create_pr).create_issues(results)
def main():
    IN_initialize = Initialize()
    IN_reporting = Reporting()
    IN_audits = Audits()
    Output = []

    analytics = IN_initialize.initialize_analyticsreporting()
    response = IN_reporting.get_report(analytics)
    management_get = IN_reporting.get_management(analytics)
    useful_values = IN_reporting.print_response(response)
    accounts = IN_reporting.get_gtm(analytics)

    print("######")
    IN_audits.check_siteSearch(useful_values)
    print("_______")
    Output.append(IN_audits.check_medium(useful_values))
    # check_totalValue()
    print("_______")
    IN_audits.check_customDimensions(management_get)
    print("_______")
    # url = raw_input('Enter URL for self-Referral check: ')
    IN_audits.check_selfReferral('yandex', useful_values)
    print("_______")
    IN_audits.check_eventTracking(useful_values)
    print("_______")
    Output.append(IN_audits.check_adwordsLink(management_get))
    print("_______")
    IN_audits.check_gtm(accounts)
    print("_______")
    IN_audits.check_goals(management_get)
    print("_______")
    IN_audits.check_customMetrics(management_get)
    print("######")
    print(Output)

    with open('data.json', 'w') as djson:
        json.dump(Output, djson)
            elif report_type == "total-dues":
                return "total: {}".format(reporting_object.dues_user())
            else:
                raise Exception("Invalid input")
        else:
            raise Exception("Invalid input")
    except IndexError as e:
        return "Expected more data {}".format(e.message)
    except Exception as e:
        return "Error occurred {}".format(e.message)


if __name__ == '__main__':  # pragma: no cover
    report = Reporting()
    """
    class_map is a kind of config where we keep all the classes and
    all the available objects of the respective classes
    """
    class_map = {
        "user": {
            "class_name": User,
            "report_object": report.user_objects
        },
        "merchant": {
            "class_name": Merchant,
            "report_object": report.merchant_objects
        },
        "txn": {
            "class_name": Transact,
            "report_object": report.transact_objects
def fit(self, data):
    """
    sim_mode = {global, local}
    sim_delta = (0, 1]
    clustering_mode = {None, hfps, wsc}
    """
    self.data = data
    if self.data.data_train_folds:
        samples_training = self.data.data_train_folds[self.exp]
        samples_test = self.data.data_valid_folds[self.exp]
    else:
        samples_training = self.data.data_train_list
        samples_test = self.data.data_test_list

    if REBOOT_MODEL:
        trained_model = RebootModel(self.exp, self.data.dtypes)
        pop = trained_model.get_model()
        self.population = ClassifierSets(
            attribute_info=data.attribute_info,
            dtypes=data.dtypes,
            rand_func=self.rng,
            sim_mode='global',
            sim_delta=0.9,
            clustering_method=None,
            cosine_matrix=self.data.sim_matrix,
            data_cov_inv=self.data.cov_inv,
            popset=pop)
        self.population.micro_pop_size = sum(
            [classifier.numerosity for classifier in pop])
        self.population.pop_average_eval(self.data.no_features)
        analyze(pop, data)
    else:
        self.population = ClassifierSets(
            attribute_info=data.attribute_info,
            dtypes=data.dtypes,
            rand_func=self.rng,
            sim_mode='global',
            sim_delta=0.9,
            clustering_method=None,
            cosine_matrix=self.data.sim_matrix,
            data_cov_inv=self.data.cov_inv)

    if THRESHOLD == 1:
        bi_partition = one_threshold
    elif THRESHOLD == 2:
        bi_partition = rank_cut
    else:
        raise Exception("prediction threshold method unidentified!")

    def track_performance(samples):
        f_score = 0
        label_prediction = set()
        for sample in samples:
            self.population.make_eval_matchset(sample[0])
            if not self.population.matchset:
                f_score += fscore(label_prediction, sample[1])
            else:
                # if PREDICTION_METHOD == 1:
                label_prediction = max_prediction(
                    [self.population.popset[ref]
                     for ref in self.population.matchset],
                    self.rng.randint)
                # else:
                #     _, label_prediction = aggregate_prediction(
                #         [self.population.popset[ref]
                #          for ref in self.population.matchset])
                #     label_prediction = bi_partition(vote)
                f_score += fscore(label_prediction, sample[1])
        return f_score / len(samples)

    while self.iteration < (MAX_ITERATION + 1):
        sample = samples_training[self.iteration % len(samples_training)]
        self.train_iteration(sample)

        if (self.iteration % TRACK_FREQ) == 0 and self.iteration > 0:
            self.timer.start_evaluation()
            test_fscore = track_performance(samples_test)
            train_fscore = track_performance(samples_training)
            self.population.pop_average_eval(self.data.no_features)
            self.training_track.write(
                str(self.iteration) + ", " +
                self.population.get_pop_tracking() + ", " +
                str("%.4f" % train_fscore) + ", " +
                str("%.4f" % test_fscore) + ", " +
                str("%.4f" % self.timer.get_global_timer()) + "\n")
            self.timer.stop_evaluation()
            self.track_to_plot.append([
                self.iteration, train_fscore, test_fscore,
                self.population.ave_fitness,
                float(self.population.micro_pop_size / MAX_CLASSIFIER),
                float(len(self.population.popset) / MAX_CLASSIFIER)
            ])
        self.iteration += 1

    self.training_track.close()
    self.timer.start_evaluation()
    self.population.pop_average_eval(self.data.no_features)
    self.population.estimate_label_pr(samples_training)
    [train_evaluation, _, train_coverage] = self.evaluation(samples_training)
    [test_evaluation, test_class_precision, test_coverage] = self.evaluation(samples_test)
    self.timer.stop_evaluation()

    reporting = Reporting(self.exp)
    reporting.write_pop(self.population.popset, self.data.dtypes)
    _ = self.timer.get_global_timer()
    reporting.write_model_stats(self.population, self.timer,
                                train_evaluation, train_coverage,
                                test_evaluation, test_coverage)
    return [test_evaluation, test_class_precision, self.track_to_plot]
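fscore is not defined in this snippet; given that it compares a predicted label set against a sample's true label set, a plausible reading is the example-based F1. A sketch under that assumption:

def fscore(predicted, actual):
    # example-based F1 for one sample: 2*|P & T| / (|P| + |T|)
    predicted, actual = set(predicted), set(actual)
    if not predicted and not actual:
        return 1.0  # both empty: treated as a perfect match (a convention, assumed)
    return 2.0 * len(predicted & actual) / (len(predicted) + len(actual))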
def initial(self):
    self.reporting = Reporting(self.configuration, self.model, self.modelTime)
class BmiPCRGlobWB(EBmi):

    # we use the same epoch as pcrglobwb netcdf reporting
    def days_since_industry_epoch(self, modeltime):
        return (modeltime - datetime.date(1901, 1, 1)).days

    def in_modeltime(self, days_since_industry_epoch):
        return (datetime.datetime(1901, 1, 1) + datetime.timedelta(days=days_since_industry_epoch)).date()

    def calculate_shape(self):
        # return pcr.pcr2numpy(self.model.landmask, 1e20).shape
        return (pcr.clone().nrRows(), pcr.clone().nrCols())

    # BMI initialize (as a single step)
    def initialize(self, fileName):
        self.initialize_config(fileName)
        self.initialize_model()

    # EBMI initialize (first step of two)
    def initialize_config(self, fileName):
        logger.info("PCRGlobWB: initialize_config")
        try:
            self.configuration = Configuration(fileName, relative_ini_meteo_paths=True)
            pcr.setclone(self.configuration.cloneMap)

            # set start and end time based on configuration
            self.model_time = ModelTime()
            self.model_time.getStartEndTimeSteps(
                self.configuration.globalOptions['startTime'],
                self.configuration.globalOptions['endTime'])
            self.model_time.update(0)

            self.shape = self.calculate_shape()
            logger.info("Shape of maps is %s", str(self.shape))
            self.model = None
        except:
            import traceback
            traceback.print_exc()
            raise

    # EBMI initialize (second step of two)
    def initialize_model(self):
        if self.model is not None:
            # already initialized
            return
        try:
            logger.info("PCRGlobWB: initialize_model")
            initial_state = None
            self.model = PCRGlobWB(self.configuration, self.model_time, initial_state)
            self.reporting = Reporting(self.configuration, self.model, self.model_time)
            logger.info("Shape of maps is %s", str(self.shape))
            logger.info("PCRGlobWB Initialized")
        except:
            import traceback
            traceback.print_exc()
            raise

    def update(self):
        timestep = self.model_time.timeStepPCR
        self.model_time.update(timestep + 1)
        self.model.read_forcings()
        self.model.update(report_water_balance=True)
        self.reporting.report()

    def update_until(self, time):
        while self.get_current_time() + 0.001 < time:
            self.update()

    def update_frac(self, time_frac):
        raise NotImplementedError

    def finalize(self):
        pass

    def get_component_name(self):
        return "pcrglobwb"

    def get_input_var_names(self):
        return ["top_layer_soil_saturation"]

    def get_output_var_names(self):
        return ["top_layer_soil_saturation"]

    def get_var_type(self, long_var_name):
        return 'float64'

    def get_var_units(self, long_var_name):
        # TODO: this is not a proper unit
        return '1'

    def get_var_rank(self, long_var_name):
        return 0

    def get_var_size(self, long_var_name):
        return np.prod(self.get_grid_shape(long_var_name))

    def get_var_nbytes(self, long_var_name):
        # note: the original used np.float64.itemsize, which on the type object is a
        # descriptor rather than a byte count; np.dtype resolves it correctly
        return self.get_var_size(long_var_name) * np.dtype(np.float64).itemsize

    def get_start_time(self):
        return self.days_since_industry_epoch(self.model_time.startTime)

    def get_current_time(self):
        return self.days_since_industry_epoch(self.model_time.currTime)

    def get_end_time(self):
        return self.days_since_industry_epoch(self.model_time.endTime)

    def get_time_step(self):
        return 1

    def get_time_units(self):
        return "Days since 1901-01-01"

    def get_value(self, long_var_name):
        logger.info("getting value for var %s", long_var_name)
        if long_var_name == "top_layer_soil_saturation":
            if self.model is not None and hasattr(self.model.landSurface, 'satDegUpp000005'):
                # first make all NaNs into 0.0 with cover, then cut out the model using
                # the landmask. This should not actually make a difference.
                remasked = pcr.ifthen(
                    self.model.landmask,
                    pcr.cover(self.model.landSurface.satDegUpp000005, 0.0))
                pcr.report(self.model.landSurface.satDegUpp000005, "value.map")
                pcr.report(remasked, "remasked.map")
                value = pcr.pcr2numpy(remasked, np.NaN)
            else:
                logger.info("model has not run yet, returning empty state for top_layer_soil_saturation")
                value = pcr.pcr2numpy(pcr.scalar(0.0), np.NaN)
            doubles = value.astype(np.float64)
            result = np.flipud(doubles)
            return result
        else:
            raise Exception("unknown var name " + long_var_name)

    def get_value_at_indices(self, long_var_name, inds):
        raise NotImplementedError

    # def get_satDegUpp000005_from_observation(self):
    #     # assumptions for the observation values:
    #     # - this should be replaced by values from the ECV soil moisture product (satellite data)
    #     # - uncertainty should be included here
    #     # - note that the value should be between 0.0 and 1.0
    #     observed_satDegUpp000005 = pcr.min(1.0,
    #                                pcr.max(0.0,
    #                                pcr.normal(pcr.boolean(1)) + 1.0))
    #     return observed_satDegUpp000005

    def set_satDegUpp000005(self, src):
        mask = np.isnan(src)
        src[mask] = 1e20
        observed_satDegUpp000005 = pcr.numpy2pcr(pcr.Scalar, src, 1e20)
        pcr.report(observed_satDegUpp000005, "observed.map")

        constrained_satDegUpp000005 = pcr.min(1.0, pcr.max(0.0, observed_satDegUpp000005))
        pcr.report(constrained_satDegUpp000005, "constrained.map")
        pcr.report(self.model.landSurface.satDegUpp000005, "origmap.map")

        diffmap = constrained_satDegUpp000005 - self.model.landSurface.satDegUpp000005
        pcr.report(diffmap, "diffmap.map")

        # ratio between observation and model
        ratio_between_observation_and_model = pcr.ifthenelse(
            self.model.landSurface.satDegUpp000005 > 0.0,
            constrained_satDegUpp000005 / self.model.landSurface.satDegUpp000005,
            0.0)

        # updating upper soil states for all land cover types
        for coverType in self.model.landSurface.coverTypes:

            # correcting upper soil state (storUpp000005)
            self.model.landSurface.landCoverObj[coverType].storUpp000005 *= ratio_between_observation_and_model

            # if the model value is 0.0, storUpp000005 is calculated based on
            # the storage capacity (model parameter) and the observed saturation degree
            self.model.landSurface.landCoverObj[coverType].storUpp000005 = pcr.ifthenelse(
                self.model.landSurface.satDegUpp000005 > 0.0,
                self.model.landSurface.landCoverObj[coverType].storUpp000005,
                constrained_satDegUpp000005 * self.model.landSurface.parameters.storCapUpp000005)

            # correct for any scaling issues (values < 0 or > 1 do not make sense)
            self.model.landSurface.landCoverObj[coverType].storUpp000005 = pcr.min(
                1.0,
                pcr.max(0.0, self.model.landSurface.landCoverObj[coverType].storUpp000005))

    def set_value(self, long_var_name, src):
        if self.model is None or not hasattr(self.model.landSurface, 'satDegUpp000005'):
            logger.info("cannot set value for %s, as model has not run yet.", long_var_name)
            return

        logger.info("setting value for %s", long_var_name)
        # logger.info("dumping state to %s", self.configuration.endStateDir)
        # self.model.dumpStateDir(self.configuration.endStateDir + "/pre/")

        # make sure the raster is the right side up
        src = np.flipud(src)
        # cast to pcraster precision
        src = src.astype(np.float32)
        sys.stdout.flush()
        logger.info("setting value shape %s", src.shape)

        if long_var_name == "top_layer_soil_saturation":
            self.set_satDegUpp000005(src)
        else:
            raise Exception("unknown var name " + long_var_name)

        # write state here to facilitate restarting tomorrow
        # logger.info("dumping state to %s", self.configuration.endStateDir)
        # self.model.dumpStateDir(self.configuration.endStateDir + "/post/")

    def set_value_at_indices(self, long_var_name, inds, src):
        raise NotImplementedError

    def get_grid_type(self, long_var_name):
        return BmiGridType.UNIFORM

    def get_grid_shape(self, long_var_name):
        # note: the original defined this method twice, the first time with a bare
        # return; only the second definition (returning self.shape) is kept
        return self.shape

    def get_grid_spacing(self, long_var_name):
        cellsize = pcr.clone().cellSize()
        return np.array([cellsize, cellsize])

    def get_grid_origin(self, long_var_name):
        north = pcr.clone().north()
        cellSize = pcr.clone().cellSize()
        nrRows = pcr.clone().nrRows()
        south = north - (cellSize * nrRows)
        west = pcr.clone().west()
        return np.array([south, west])

    def get_grid_x(self, long_var_name):
        raise ValueError

    def get_grid_y(self, long_var_name):
        raise ValueError

    def get_grid_z(self, long_var_name):
        raise ValueError

    def get_grid_connectivity(self, long_var_name):
        raise ValueError

    def get_grid_offset(self, long_var_name):
        raise ValueError

    # EBMI functions
    def set_start_time(self, start_time):
        self.model_time.setStartTime(self.in_modeltime(start_time))

    def set_end_time(self, end_time):
        self.model_time.setEndTime(self.in_modeltime(end_time))

    def get_attribute_names(self):
        raise NotImplementedError

    def get_attribute_value(self, attribute_name):
        raise NotImplementedError

    def set_attribute_value(self, attribute_name, attribute_value):
        raise NotImplementedError

    def save_state(self, destination_directory):
        logger.info("saving state to %s", destination_directory)
        self.model.dumpStateDir(destination_directory)

    def load_state(self, source_directory):
        raise NotImplementedError
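A minimal driver sketch for this BMI wrapper (the ini file name, step count, and perturbation are illustrative assumptions):

import numpy as np

bmi = BmiPCRGlobWB()
bmi.initialize("pcrglobwb.ini")               # hypothetical configuration file
bmi.update_until(bmi.get_start_time() + 10)   # run ten daily steps

sat = bmi.get_value("top_layer_soil_saturation")
bmi.set_value("top_layer_soil_saturation", np.clip(sat * 1.05, 0.0, 1.0))  # perturb and write back
bmi.save_state("./state")                     # dump restart files
bmi.finalize()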
def final_report(feature_dictionary, wals, feature_num_instances_dictionary,
                 possibilities, errors, label):
    reporting = Reporting(feature_dictionary, wals, feature_num_instances_dictionary,
                          possibilities, label, args.minIinstances)

    # write reports to stdout
    reporting.print_order_confusion_matrix_for_feature()
    reporting.print_accuracy_vs_num_instances()

    # now write reports to file
    base_file_name = os.path.join(data_dir, label.lower().replace(" ", "_"))
    report_file_name = base_file_name + "_report.txt"
    csv_file_name = base_file_name + "_accuracy_data.txt"
    with open(report_file_name, mode="w") as report_file, \
         open(csv_file_name, mode="w") as csv_file:
        reporting.print_order_confusion_matrix_for_feature(report_file)
        reporting.print_accuracy_vs_num_instances(report_file)
        # make a csv (maybe not useful?)
        reporting.write_accuracy_vs_num_instances_to_as_csv(csv_file)
        # print the errors to a file
        errors.print_incorrect_guesses(report_file)
        }
        dict_filter_1.append(d)
        iterator += 1
    except:
        continue

# Filter data for processing (delete elements: [], 'u')
captureFilteredDict = []
for item in dict_filter_1:
    d = {
        'VALID_FRAME_N': str(item["valid_frame.number"]),  # IP packet
        'FRAME_N': str(item["frame.number"][0]),           # frame
        'IP_SRC': str(item["ip.src"]),
        'IP_DST': str(item["ip.dst"][0]),
        'IP_DSCP': str(item["ip.dsfield.dscp"][0]),
        'QOS': str(item["wlan.qos.priority"][0]),
        'IP_LEN': str(item["ip.len"][0])
    }
    captureFilteredDict.append(d)

# Reporting: create the report file
flReport = Reporting("report.html", configDict, captureFilteredDict)
flReport.doReport()  # generate report

print("Report created. Opening in browser...\n")
time.sleep(0.5)
new = 2  # open in a new tab, if possible
url = "file://" + os.path.realpath("report.html")  # URL to report file
webbrowser.open(url, new=new)  # open report in web browser
class DeterministicRunner(DynamicModel):

    def __init__(self, configuration, modelTime, initialState=None, system_argument=None):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        self.model = PCRGlobWB(configuration, modelTime, initialState)
        self.reporting = Reporting(configuration, self.model, modelTime)

        # the model will set parameters based on global pre-multipliers given in the argument:
        self.adjusting_parameters(configuration, system_argument)

        # make the configuration available for the other methods/functions
        self.configuration = configuration

    def adjusting_parameters(self, configuration, system_argument):

        # it is also possible to define prefactors via the ini/configuration file:
        # - these will overwrite any previously given pre-multipliers
        if 'prefactorOptions' in configuration.allSections:

            logger.info("Adjusting some model parameters based on given values in the ini/configuration file.")

            # Note that refPotET does NOT work for the changing WMIN or Joyce land cover options.
            self.multiplier_for_refPotET = float(
                configuration.prefactorOptions['linear_multiplier_for_refPotET'])          # linear scale
            multiplier_for_degreeDayFactor = float(
                configuration.prefactorOptions['linear_multiplier_for_degreeDayFactor'])   # linear scale
            multiplier_for_minSoilDepthFrac = float(
                configuration.prefactorOptions['linear_multiplier_for_minSoilDepthFrac'])  # linear scale
            multiplier_for_kSat = float(
                configuration.prefactorOptions['log_10_multiplier_for_kSat'])              # log scale
            multiplier_for_storCap = float(
                configuration.prefactorOptions['linear_multiplier_for_storCap'])           # linear scale
            multiplier_for_recessionCoeff = float(
                configuration.prefactorOptions['log_10_multiplier_for_recessionCoeff'])    # log scale

            # saving global pre-multipliers to the log file:
            msg = "\n"
            msg += "\n"
            msg += "Multiplier values used: " + "\n"
            msg += "For minSoilDepthFrac           : " + str(multiplier_for_minSoilDepthFrac) + "\n"
            msg += "For kSat (log-scale)           : " + str(multiplier_for_kSat) + "\n"
            msg += "For recessionCoeff (log-scale) : " + str(multiplier_for_recessionCoeff) + "\n"
            msg += "For storCap                    : " + str(multiplier_for_storCap) + "\n"
            msg += "For degreeDayFactor            : " + str(multiplier_for_degreeDayFactor) + "\n"
            msg += "For refPotET                   : " + str(self.multiplier_for_refPotET) + "\n"
            logger.info(msg)
            # - also to a txt file; this will be stored in the "map" folder of the 'outputDir'
            #   (as we set the current working directory to this "map" folder, see configuration.py)
            f = open("multiplier.txt", "w")
            f.write(msg)
            f.close()

            # set the parameter "recessionCoeff" based on the given pre-multiplier
            # - also saving the adjusted parameter maps to pcraster files
            # - these will be stored in the "map" folder of the 'outputDir'
            #   (as we set the current working directory to this "map" folder, see configuration.py)

            # "recessionCoeff": minimum value is zero and using log-scale
            self.model.groundwater.recessionCoeff = pcr.max(
                0.0, (10**(multiplier_for_recessionCoeff)) * self.model.groundwater.recessionCoeff)
            self.model.groundwater.recessionCoeff = pcr.min(
                1.0, self.model.groundwater.recessionCoeff)
            # report the map
            pcr.report(self.model.groundwater.recessionCoeff, "recessionCoeff.map")

            # set the parameters "kSat", "storCap", "minSoilDepthFrac", and "degreeDayFactor"
            # based on the given pre-multipliers
            for coverType in self.model.landSurface.coverTypes:

                # "degreeDayFactor"
                self.model.landSurface.landCoverObj[coverType].degreeDayFactor = pcr.max(
                    0.0,
                    multiplier_for_degreeDayFactor * self.model.landSurface.landCoverObj[coverType].degreeDayFactor)
                # report the map
                pcraster_filename = "degreeDayFactor" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].degreeDayFactor,
                           pcraster_filename)

                # "kSat" and "storCap" for the 2-layer model
                if self.model.landSurface.numberOfSoilLayers == 2:

                    # "kSat": minimum value is zero and using log-scale
                    self.model.landSurface.parameters.kSatUpp = \
                        pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.parameters.kSatUpp)
                    self.model.landSurface.parameters.kSatLow = \
                        pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.parameters.kSatLow)
                    # report the maps (for debugging)
                    # pcraster_filename = "kSatUpp" + "_" + coverType + ".map"
                    # pcr.report(self.model.landSurface.parameters.kSatUpp, pcraster_filename)
                    # pcraster_filename = "kSatLow" + "_" + coverType + ".map"
                    # pcr.report(self.model.landSurface.parameters.kSatLow, pcraster_filename)

                    # "storCap": minimum value is zero
                    self.model.landSurface.parameters.storCapUpp = pcr.max(
                        0.0, multiplier_for_storCap * self.model.landSurface.parameters.storCapUpp)
                    self.model.landSurface.parameters.storCapLow = pcr.max(
                        0.0, multiplier_for_storCap * self.model.landSurface.parameters.storCapLow)
                    # report the maps (for debugging)
                    # pcraster_filename = "storCapUpp" + "_" + coverType + ".map"
                    # pcr.report(self.model.landSurface.parameters.storCapUpp, pcraster_filename)
                    # pcraster_filename = "storCapLow" + "_" + coverType + ".map"
                    # pcr.report(self.model.landSurface.parameters.storCapLow, pcraster_filename)

                # "kSat" and "storCap" for the 3-layer model
                if self.model.landSurface.numberOfSoilLayers == 3:

                    # "kSat": minimum value is zero and using log-scale
                    self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005 = \
                        pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005)
                    self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030 = \
                        pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030)
                    self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150 = \
                        pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150)
                    # report the maps
                    pcraster_filename = "kSatUpp000005" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005, pcraster_filename)
                    pcraster_filename = "kSatUpp005030" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030, pcraster_filename)
                    pcraster_filename = "kSatLow030150" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150, pcraster_filename)

                    # "storCap": minimum value is zero
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 = pcr.max(
                        0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005)
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 = pcr.max(
                        0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030)
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150 = pcr.max(
                        0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150)
                    # report the maps
                    pcraster_filename = "storCapUpp000005" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005, pcraster_filename)
                    pcraster_filename = "storCapUpp005030" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030, pcraster_filename)
                    pcraster_filename = "storCapLow030150" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150, pcraster_filename)

                # re-calculate rootZoneWaterStorageCap as the consequence of the modification of "storCap"
                # This is WMAX in the oldcalc script.
                if self.model.landSurface.numberOfSoilLayers == 2:
                    self.model.landSurface.parameters.rootZoneWaterStorageCap = \
                        self.model.landSurface.parameters.storCapUpp + \
                        self.model.landSurface.parameters.storCapLow
                if self.model.landSurface.numberOfSoilLayers == 3:
                    self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap = \
                        self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 + \
                        self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 + \
                        self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150
                # report the map
                # pcraster_filename = "rootZoneWaterStorageCap" + "_" + coverType + ".map"
                # pcr.report(self.model.landSurface.parameters.rootZoneWaterStorageCap, pcraster_filename)

            # "minSoilDepthFrac"
            if multiplier_for_minSoilDepthFrac != 1.0:
                for coverType in self.model.landSurface.coverTypes:

                    # minimum value is zero
                    self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.max(
                        0.0, multiplier_for_minSoilDepthFrac * self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)
                    # minSoilDepthFrac values will be limited by maxSoilDepthFrac
                    self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.min(
                        self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac,
                        self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac)
                    # maximum value is 1.0
                    self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.min(
                        1.0, self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)
                    # report the map
                    pcraster_filename = "minSoilDepthFrac" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac, pcraster_filename)

                    # re-calculate arnoBeta (as the consequence of the modification of minSoilDepthFrac)
                    self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.max(
                        0.001,
                        (self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac - 1.) /
                        (1. - self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac) +
                        self.model.landSurface.parameters.orographyBeta - 0.01)
                    self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.cover(
                        pcr.max(0.001, self.model.landSurface.landCoverObj[coverType].arnoBeta), 0.001)
                    # report the map
                    pcraster_filename = "arnoBeta" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].arnoBeta, pcraster_filename)

                    # re-calculate rootZoneWaterStorageMin (as the consequence of the modification of minSoilDepthFrac)
                    # This is WMIN in the oldcalc script.
                    # WMIN (unit: m): minimum local soil water capacity within the grid-cell
                    self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin = \
                        self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac * \
                        self.model.landSurface.parameters.rootZoneWaterStorageCap
                    # report the map
                    pcraster_filename = "rootZoneWaterStorageMin" + "_" + coverType + ".map"
                    pcr.report(self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin, pcraster_filename)

                    # re-calculate rootZoneWaterStorageRange
                    # (as the consequence of the modification of rootZoneWaterStorageCap and minSoilDepthFrac)
                    # WMAX - WMIN (unit: m)
                    self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageRange = \
                        self.model.landSurface.parameters.rootZoneWaterStorageCap - \
                        self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin
                    # report the map
                    # pcraster_filename = "rootZoneWaterStorageRange" + "_" + coverType + ".map"
                    # pcr.report(self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageRange, pcraster_filename)

    def initial(self):
        pass

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # update model (will pick up current model time from model time object)
        self.model.read_forcings()
        self.model.update(report_water_balance=True)

        # do any needed reporting for this time step
        self.reporting.report()
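Note that the log-scale prefactors above multiply by 10**m rather than by m itself; a quick plain-Python illustration with made-up values:

kSat = 5.0e-6                 # illustrative conductivity value
m_linear, m_log10 = 2.0, 0.3

print(kSat * m_linear)        # linear prefactor: 1.0e-05
print(kSat * 10 ** m_log10)   # log10 prefactor: ~9.97e-06, since 10**0.3 is ~1.995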
class DeterministicRunner(DynamicModel):

    def __init__(self, configuration, modelTime, initialState=None, system_argument=None):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        self.model = PCRGlobWB(configuration, modelTime, initialState)
        self.reporting = Reporting(configuration, self.model, modelTime)

        # the model will set parameters based on global pre-multipliers given in the argument:
        if system_argument is not None:
            self.adjusting_parameters(configuration, system_argument)

        # option to include merging processes for pcraster maps and netcdf files:
        self.with_merging = True
        if ('with_merging' in configuration.globalOptions.keys()) and (
                configuration.globalOptions['with_merging'] == "False"):
            self.with_merging = False

        # make the configuration available for the other methods/functions
        self.configuration = configuration

    def adjusting_parameters(self, configuration, system_argument):

        # global pre-multipliers given in the argument:
        if len(system_argument) > 4:

            logger.info("Adjusting some model parameters based on given values in the system argument.")

            # pre-multipliers for minSoilDepthFrac, kSat, recessionCoeff, storCap and degreeDayFactor
            # Note that minSoilDepthFrac does NOT work for the changing WMIN or Joyce land cover options.
            multiplier_for_minSoilDepthFrac = float(system_argument[4])  # linear scale
            multiplier_for_kSat = float(system_argument[5])              # log scale
            multiplier_for_recessionCoeff = float(system_argument[6])    # log scale
            multiplier_for_storCap = float(system_argument[7])           # linear scale
            multiplier_for_degreeDayFactor = float(system_argument[8])   # linear scale

            # pre-multiplier for the reference potential ET
            self.multiplier_for_refPotET = float(system_argument[9])     # linear scale

        # it is also possible to define prefactors via the ini/configuration file:
        # - these will overwrite any previously given pre-multipliers
        if 'prefactorOptions' in configuration.allSections:

            logger.info("Adjusting some model parameters based on given values in the ini/configuration file.")

            # Note that refPotET does NOT work for the changing WMIN or Joyce land cover options.
            self.multiplier_for_refPotET = float(
                configuration.prefactorOptions['linear_multiplier_for_refPotET'])          # linear scale
            multiplier_for_degreeDayFactor = float(
                configuration.prefactorOptions['linear_multiplier_for_degreeDayFactor'])   # linear scale
            multiplier_for_minSoilDepthFrac = float(
                configuration.prefactorOptions['linear_multiplier_for_minSoilDepthFrac'])  # linear scale
            multiplier_for_kSat = float(
                configuration.prefactorOptions['log_10_multiplier_for_kSat'])              # log scale
            multiplier_for_storCap = float(
                configuration.prefactorOptions['linear_multiplier_for_storCap'])           # linear scale
            multiplier_for_recessionCoeff = float(
                configuration.prefactorOptions['log_10_multiplier_for_recessionCoeff'])    # log scale

        # saving global pre-multipliers to the log file:
        msg = "\n"
        msg += "\n"
        msg += "Multiplier values used: " + "\n"
        msg += "For minSoilDepthFrac           : " + str(multiplier_for_minSoilDepthFrac) + "\n"
        msg += "For kSat (log-scale)           : " + str(multiplier_for_kSat) + "\n"
        msg += "For recessionCoeff (log-scale) : " + str(multiplier_for_recessionCoeff) + "\n"
        msg += "For storCap                    : " + str(multiplier_for_storCap) + "\n"
        msg += "For degreeDayFactor            : " + str(multiplier_for_degreeDayFactor) + "\n"
        msg += "For refPotET                   : " + str(self.multiplier_for_refPotET) + "\n"
        logger.info(msg)
        # - also to a txt file; this will be stored in the "map" folder of the 'outputDir'
        #   (as we set the current working directory to this "map" folder, see configuration.py)
        f = open("multiplier.txt", "w")
        f.write(msg)
        f.close()

        # set the parameter "recessionCoeff" based on the given pre-multiplier
        # - also saving the adjusted parameter maps to pcraster files
        # - these will be stored in the "map" folder of the 'outputDir'
        #   (as we set the current working directory to this "map" folder, see configuration.py)

        # "recessionCoeff": minimum value is zero and using log-scale
        self.model.groundwater.recessionCoeff = pcr.max(
            0.0, (10**(multiplier_for_recessionCoeff)) * self.model.groundwater.recessionCoeff)
        self.model.groundwater.recessionCoeff = pcr.min(
            1.0, self.model.groundwater.recessionCoeff)
        # report the map
        pcr.report(self.model.groundwater.recessionCoeff, "recessionCoeff.map")

        # set the parameters "kSat", "storCap", "minSoilDepthFrac", and "degreeDayFactor"
        # based on the given pre-multipliers
        for coverType in self.model.landSurface.coverTypes:

            # "degreeDayFactor"
            self.model.landSurface.landCoverObj[coverType].degreeDayFactor = pcr.max(
                0.0,
                multiplier_for_degreeDayFactor * self.model.landSurface.landCoverObj[coverType].degreeDayFactor)
            # report the map
            pcraster_filename = "degreeDayFactor" + "_" + coverType + ".map"
            pcr.report(self.model.landSurface.landCoverObj[coverType].degreeDayFactor,
                       pcraster_filename)

            # "kSat" and "storCap" for the 2-layer model
            if self.model.landSurface.numberOfSoilLayers == 2:

                # "kSat": minimum value is zero and using log-scale
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp = \
                    pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatLow = \
                    pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatLow)
                # report the maps
                pcraster_filename = "kSatUpp" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp, pcraster_filename)
                pcraster_filename = "kSatLow" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatLow, pcraster_filename)

                # "storCap": minimum value is zero
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp = pcr.max(
                    0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapLow = pcr.max(
                    0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapLow)
                # report the maps
                pcraster_filename = "storCapUpp" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp, pcraster_filename)
                pcraster_filename = "storCapLow" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapLow, pcraster_filename)

            # "kSat" and "storCap" for the 3-layer model
            if self.model.landSurface.numberOfSoilLayers == 3:

                # "kSat": minimum value is zero and using log-scale
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005 = \
                    pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030 = \
                    pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150 = \
                    pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150)
                # report the maps
                pcraster_filename = "kSatUpp000005" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005, pcraster_filename)
                pcraster_filename = "kSatUpp005030" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030, pcraster_filename)
                pcraster_filename = "kSatLow030150" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150, pcraster_filename)

                # "storCap": minimum value is zero
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 = pcr.max(
                    0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 = pcr.max(
                    0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150 = pcr.max(
                    0.0, multiplier_for_storCap * self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150)
                # report the maps
                pcraster_filename = "storCapUpp000005" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005, pcraster_filename)
                pcraster_filename = "storCapUpp005030" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030, pcraster_filename)
                pcraster_filename = "storCapLow030150" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150, pcraster_filename)

            # re-calculate rootZoneWaterStorageCap as the consequence of the modification of "storCap"
            # This is WMAX in the oldcalc script.
            if self.model.landSurface.numberOfSoilLayers == 2:
                self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap = \
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp + \
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow
            if self.model.landSurface.numberOfSoilLayers == 3:
                self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap = \
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 + \
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 + \
                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150
            # report the map
            pcraster_filename = "rootZoneWaterStorageCap" + "_" + coverType + ".map"
            pcr.report(self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap,
                       pcraster_filename)

            # "minSoilDepthFrac"
            if multiplier_for_minSoilDepthFrac != 1.0:

                # minimum value is zero
                self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.max(
                    0.0, multiplier_for_minSoilDepthFrac * self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)
                # minSoilDepthFrac values will be limited by maxSoilDepthFrac
                self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.min(
                    self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac,
                    self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac)
                # maximum value is 1.0
                self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.min(
                    1.0, self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)
                # report the map
                pcraster_filename = "minSoilDepthFrac" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac, pcraster_filename)

                # re-calculate arnoBeta (as the consequence of the modification of minSoilDepthFrac)
                self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.max(
                    0.001,
                    (self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac - 1.) /
                    (1. - self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac) +
                    self.model.landSurface.landCoverObj[coverType].parameters.orographyBeta - 0.01)
                self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.cover(
                    pcr.max(0.001, self.model.landSurface.landCoverObj[coverType].arnoBeta), 0.001)
                # report the map
                pcraster_filename = "arnoBeta" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].arnoBeta, pcraster_filename)

                # re-calculate rootZoneWaterStorageMin (as the consequence of the modification of minSoilDepthFrac)
                # This is WMIN in the oldcalc script.
                # WMIN (unit: m): minimum local soil water capacity within the grid-cell
                self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin = \
                    self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac * \
                    self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap
                # report the map
                pcraster_filename = "rootZoneWaterStorageMin" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin, pcraster_filename)

                # re-calculate rootZoneWaterStorageRange
                # (as the consequence of the modification of rootZoneWaterStorageCap and minSoilDepthFrac)
                # WMAX - WMIN (unit: m)
                self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageRange = \
                    self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap - \
                    self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin
                # report the map
                pcraster_filename = "rootZoneWaterStorageRange" + "_" + coverType + ".map"
                pcr.report(self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageRange, pcraster_filename)

    def initial(self):
        pass

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # read model forcing (will pick up current model time from model time object)
        self.model.read_forcings()

        # adjust the reference potential ET according to the given pre-multiplier
        self.model.meteo.referencePotET = self.model.meteo.referencePotET * self.multiplier_for_refPotET

        # update model (will pick up current model time from model time object)
        # - for a run coupled to MODFLOW, water balance checks are not valid due to lateral flow.
        if self.configuration.online_coupling_between_pcrglobwb_and_modflow:
            self.model.update(report_water_balance=False)
        else:
            self.model.update(report_water_balance=True)

        # do any needed reporting for this time step
        self.reporting.report()

        # on the last day of the month, pause the calculation until modflow and the related
        # merging process are ready (only for a run with modflow)
        if self.modelTime.isLastDayOfMonth() and (self.configuration.online_coupling_between_pcrglobwb_and_modflow or
                                                  self.with_merging):

            # wait until modflow files are ready
            if self.configuration.online_coupling_between_pcrglobwb_and_modflow:
                modflow_is_ready = False
                self.count_check = 0
                while not modflow_is_ready:
                    # poll only at a few fixed seconds of each minute to limit filesystem checks
                    if datetime.datetime.now().second in (14, 29, 34, 59):
                        modflow_is_ready = self.check_modflow_status()

            # wait until merged files are ready
            merged_files_are_ready = False
            while not merged_files_are_ready:
                self.count_check = 0
                if datetime.datetime.now().second in (14, 29, 34, 59):
                    merged_files_are_ready = self.check_merging_status()

    def check_modflow_status(self):
        status_file = str(self.configuration.main_output_directory) + \
            "/modflow/transient/maps/modflow_files_for_" + str(self.modelTime.fulldate) + "_are_ready.txt"
        msg = 'Waiting for the file: ' + status_file
        if self.count_check == 1:
            logger.info(msg)
        if self.count_check < 7:
            # ~ logger.debug(msg)  # DEACTIVATED, AS THIS MAKES A HUGE DEBUG (dbg) FILE
            self.count_check += 1
        status = os.path.exists(status_file)
        if status == False:
            return status
        if status:
            self.count_check = 0
        return status

    def check_merging_status(self):
        status_file = str(self.configuration.main_output_directory) + \
            "/global/maps/merged_files_for_" + str(self.modelTime.fulldate) + "_are_ready.txt"
        msg = 'Waiting for the file: ' + status_file
        if self.count_check == 1:
            logger.info(msg)
        if self.count_check < 7:
            # ~ logger.debug(msg)  # DEACTIVATED, AS THIS MAKES A HUGE DEBUG (dbg) FILE
            self.count_check += 1
        status = os.path.exists(status_file)
        if status == False:
            return status
        if status:
            self.count_check = 0
        return status
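As parsed in adjusting_parameters above, positions 4 to 9 of the system argument carry the six pre-multipliers; an illustrative argv layout under that assumption (the first four entries are not shown in the original and are placeholders):

system_argument = ["runner.py", "setup.ini", "arg2", "arg3",  # placeholders
                   "1.00",    # [4] minSoilDepthFrac multiplier (linear)
                   "0.30",    # [5] kSat multiplier (log10, i.e. x ~2.0)
                   "-0.20",   # [6] recessionCoeff multiplier (log10, i.e. x ~0.63)
                   "1.10",    # [7] storCap multiplier (linear)
                   "1.00",    # [8] degreeDayFactor multiplier (linear)
                   "1.05"]    # [9] refPotET multiplier (linear)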
class FriSpyControllerApp(object): def __init__(self, config, logger_obj, api_watch_list): self.config = config self.logger = logger_obj self.api_watch_list = api_watch_list self.execution_timeout = self.config.get_execution_timeout() self.capture_basic_behavior = self.config.get_capture_behavior_report_basic_flag() self.capture_complete_behavior = self.config.get_capture_behavior_report_complete_flag() self.process_list = [] self.target_process_pid = None # Initialize Reporting Module self.report_generator = Reporting(config, logger_obj) # Initialize Queue for FriSpyGUI to receive the events self.event_queue_name = self.config.get_event_queue_name() self.msmqueue_event = MSMQCustom(self.event_queue_name) self.config_queue_name = self.config.get_config_queue_name() self.msmqueue_config = MSMQCustom(self.config_queue_name) # Initialize Controller self._stop_requested = threading.Event() self._reactor = Reactor(run_until_return=lambda reactor: self._stop_requested.wait()) self._device = frida.get_local_device() self._sessions = set() try: self._device.on("child-added", lambda child: self._reactor.schedule(lambda: self._on_child_added(child))) self._device.on("child-removed", lambda child: self._reactor.schedule(lambda: self._on_child_removed(child))) self._device.on("output", lambda pid, fd, data: self._reactor.schedule(lambda: self._on_output(pid, fd, data))) self._device.on("process-crashed", lambda crash: self._reactor.schedule(lambda: self._on_process_crashed(crash))) self._device.on("lost", lambda crash: self._reactor.schedule(lambda: self._on_process_crashed(crash))) except Exception as e: self.logger.log("error", "Exception - FriSpyController : run : %s" %(str(e))) self.Controller_cleaup(None) sys.exit(1) def run(self, target_process): # Schedule the execution of target process try: if not target_process: self.logger.log("error", "FriSpyController: run: FriSpy Controller failed to locate target process ") self.Controller_cleaup(None) sys.exit(1) self._reactor.schedule(lambda: self._start(target_process)) except Exception as e: self.logger.log("error", "Exception - FriSpyController : schedule_start : %s" %(str(e))) self.Controller_cleaup(None) sys.exit(1) # Initiate the execution of target process try: self._reactor.run() except Exception as e: self.logger.log("error", "Exception - FriSpyController : run : %s" %(str(e))) self.Controller_cleaup(None) sys.exit(1) def _start(self, target_process): # Spawn the target process try: pid = self._device.spawn((target_process,)) if pid: self.target_process_pid = pid try: self._instrument(pid, target_process) except Exception as e: self.logger.log("error", "Exception - FriSpyController : _start : Failed to instrument : %s" % (traceback.print_exc(file=sys.stdout))) self.Controller_cleaup(None) sys.exit(1) else: self.logger.log("error", "Error - FriSpyController : _start : Failed to instrument") self.Controller_cleaup(None) sys.exit(1) except Exception as e: self.logger.log("error", "Exception - FriSpyController : Failed to spawn : %s" %(str(traceback.print_exc(file=sys.stdout)))) self.Controller_cleaup(None) sys.exit(1) def _stop_if_idle(self): if len(self._sessions) == 0: self._stop_requested.set() def prepare_script(self): try: self.win_lib_dir = self.config.get_win_lib_directory() self.common_lib_dir = self.config.get_common_lib_directory() self.common_def_file_name = self.config.get_common_def_file() self.common_def_file_path = os.path.join(self.common_lib_dir, self.common_def_file_name) self.script_file_content = "" #Load common-lib 
    def prepare_script(self):
        try:
            self.win_lib_dir = self.config.get_win_lib_directory()
            self.common_lib_dir = self.config.get_common_lib_directory()
            self.common_def_file_name = self.config.get_common_def_file()
            self.common_def_file_path = os.path.join(self.common_lib_dir, self.common_def_file_name)
            self.script_file_content = ""

            # Load the common lib
            common_lib_content = self.get_api_lib_implementation(self.common_def_file_path)
            if common_lib_content is None:
                self.logger.log("error", "FriSpyController: prepare_script : Failed to load common lib")
                return False

            # Periodic beacon: emits a "timeout_check" event so the controller
            # can detect when the execution timeout has elapsed
            timeout_checker = '''
                var timeout_check = function(timeout){
                    var send_data = {}
                    send_data.Date = Date()
                    send_data.api_name = "timeout_check"
                    send_data.module_name = null
                    send_data.api_arguments = null
                    send(JSON.stringify(send_data, null, 4));
                }
                setInterval(timeout_check, %d)
            '''
            timeout_in_milliseconds = self.config.get_execution_timeout() * 1000  # convert seconds to milliseconds
            timeout_checker_content = timeout_checker % timeout_in_milliseconds

            self.script_file_content = "//////// Common Lib ////////\n\n%s\n%s\n" % (common_lib_content, timeout_checker_content)
            self.logger.log("info", "common-lib loaded successfully")

            # Append one hook library per entry in the API watch list
            appended_lib_content = ""
            for lib_info in self.api_watch_list:
                # Get API lib info; entries follow the "<parent>_<libname>" convention
                lib_file_parent = lib_info.split("_")[0]
                lib_file_name = lib_info.split("_")[1]
                lib_file_path = os.path.join(self.win_lib_dir, lib_file_parent, lib_file_name) + ".js"
                lib_content = self.get_api_lib_implementation(lib_file_path)
                if lib_content is not None:
                    appended_lib_content = "%s\n//%s\n%s\n" % (appended_lib_content, lib_file_path, lib_content)

            if appended_lib_content == "":
                self.logger.log("error", "FriSpyController: prepare_script : Failed to prepare Script")
                return False

            self.script_file_content = "%s\n%s\n" % (self.script_file_content, appended_lib_content)

            status = self.save_script()
            if not status:
                return False
        except Exception as e:
            self.logger.log("error", "Exception - FriSpyController: prepare_script : Failed to prepare Script: %s" % (str(e)))
            return False
        return True

    def save_script(self):
        self.api_injector_file = os.path.join(self.config.get_config_directory(), self.config.get_api_injector())
        self.logger.log("info", "Saving Script...")
        if len(self.script_file_content) > 0:
            try:
                with open(self.api_injector_file, "w") as filehandle:
                    filehandle.write(self.script_file_content)
                self.logger.log("info", "Script saved successfully")
            except Exception as e:
                self.logger.log("error", "Exception: Failed to save Script watch list : %s" % str(e))
                return False
        else:
            self.logger.log("error", "Injector Script is empty")
            return False
        return True

    def get_api_lib_implementation(self, api_lib_file_path):
        lib_content = None
        if os.path.exists(api_lib_file_path):
            try:
                with open(api_lib_file_path, "r") as libhandle:
                    lib_content = libhandle.read()
                self.logger.log("info", "API lib loaded successfully : %s" % api_lib_file_path)
            except Exception as e:
                self.logger.log("warn", "Exception: Failed to load API lib : %s" % str(e))
        else:
            self.logger.log("warn", "API lib not found : %s" % api_lib_file_path)
        return lib_content
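    # Instrumentation lifecycle: _instrument() below attaches to a pid,
    # enables child gating so spawned children stay suspended until they are
    # hooked as well, loads the assembled script, records the start/end
    # timestamps that bound the execution timeout, and then resumes the
    # target process.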
    def _instrument(self, pid, path):
        try:
            self.logger.log("", "Target process attach : pid=({})".format(pid))
            processinfo = {"pid": pid, "path": path}
            self.logger.log_to_gui("ProcessCreate", "%s" % (json.dumps(processinfo)))
            self.process_list.append(pid)

            session = self._device.attach(pid)
            session.on("detached",
                       lambda reason: self._reactor.schedule(lambda: self._on_detached(pid, session, reason)))
            session.enable_child_gating()

            script = session.create_script(self.script_file_content)
            script.on("message",
                      lambda message, data: self._reactor.schedule(lambda: self._on_message(pid, message)))
            self.logger.log("", "Load Injector module")
            script.load()

            self.logger.log("", "Resume Execution : pid=({})".format(pid))
            self.target_exec_start_time = datetime.now()
            self.logger.log("", "Eventing started {}".format(self.target_exec_start_time))
            self.target_exec_end_time = self.target_exec_start_time + timedelta(seconds=self.execution_timeout)
            self._device.resume(pid)
            self._sessions.add(session)
        except Exception as e:
            self.logger.log("error", "Exception: instrument(): %s" % (str(e)))
            self.Controller_cleanup(None)
            sys.exit(1)

    def _on_child_added(self, child):
        self.logger.log("", " child_added: {}".format(child))
        self._instrument(child.pid, child.path)

    def _on_child_removed(self, child):
        self.logger.log("", " child_removed: {}".format(child))

    def _on_output(self, pid, fd, data):
        self.logger.log("", " output: pid={}, fd={}, data={}".format(pid, fd, repr(data)))

    def _on_detached(self, pid, session, reason):
        self.logger.log("", " detached: pid={}, reason='{}'".format(pid, reason))
        self._device.kill(pid)
        self._sessions.remove(session)
        self._reactor.schedule(self._stop_if_idle, delay=0.5)
        if len(self._sessions) == 0:
            self.logger.log("", "Exiting FriSpy...")

    def _on_process_crashed(self, crash):
        self.logger.log("", "on_process_crashed: crash={}".format(crash))

    def is_ready_to_process(self, jsonobj):
        # Filter out events whose key arguments are null handles/names;
        # these carry no useful behavioral information
        api_args = jsonobj["api_arguments"]
        if "ObjectAttributes" in api_args and api_args["ObjectAttributes"] == "0x0":
            return False
        if jsonobj["api_name"] == "OpenProcessToken" and \
           "ProcessHandle" in api_args and api_args["ProcessHandle"] == "0xffffffff":
            return False
        if jsonobj["api_name"] in ("CreateEventExW", "CreateEventExA", "CreateEventA", "CreateEventW",
                                   "CreateMutexExW", "CreateMutexW", "CreateSemaphoreExW") and \
           "lpName" in api_args and api_args["lpName"] == "0x0":
            return False
        return True
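    # Event pipeline: _on_message() below receives JSON events from the
    # injected script, aborts on the "timeout_check" beacon, filters
    # null-handle noise via is_ready_to_process(), enriches the rest through
    # the Reporting module, and forwards the selected basic/complete report
    # to FriSpyGUI over the MSMQ event queue.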
    def _on_message(self, pid, message):
        jsobj = None
        try:
            # Check for errors in loading the Injector Module
            if message["type"] == "error":
                self.logger.log("error", "Error: Injector Module: %s - %s" % (message["description"], message["stack"]))
                self.Controller_cleanup(pid)
                sys.exit(1)

            # Get the events from the CoreEngine
            jsobj = json.loads(message["payload"])

            # Abort the execution if the execution timeout has been triggered
            if "api_name" in jsobj and jsobj["api_name"] == "timeout_check":
                self.logger.log("info", "Execution timeout triggered : %s ..." % (jsobj["Date"]))
                self.logger.log_to_gui("Timeout", "Execution timeout triggered. Aborting FriSpy..")
                self.Controller_cleanup(pid)
                return

            if "api_retval" in jsobj and jsobj["api_retval"] == "warn":
                self.logger.log("warn", "%s" % (jsobj["api_arguments"]))
            else:
                # Collect the events
                self.logger.log("", jsobj)
                jsobj["process_id"] = str(pid)
                if self.is_ready_to_process(jsobj):
                    # Enrich the events from the CoreEngine
                    (argument_report_basic_json_obj,
                     argument_report_complete_json_obj) = self.report_generator.add_behavior_to_report(jsobj)

                    msg_label = "Event"
                    msg_body = None
                    if self.capture_basic_behavior and argument_report_basic_json_obj:
                        msg_body = json.dumps(argument_report_basic_json_obj)
                    if self.capture_complete_behavior and argument_report_complete_json_obj:
                        msg_body = json.dumps(argument_report_complete_json_obj)

                    # Send the events to FriSpyGUI
                    if msg_body:
                        self.msmqueue_event.open_queue(2, 0)  # MQ_SEND_ACCESS
                        self.msmqueue_event.send_to_queue(msg_label, msg_body)
                        self.msmqueue_event.close_queue()
        except Exception as e:
            self.logger.log("error", "Exception: on_message : %s : %s : %s" % (str(e), str(jsobj), traceback.format_exc()))
            self.Controller_cleanup(pid)
            sys.exit(1)

    def Controller_cleanup(self, pid):
        # Terminate all spawned/instrumented processes
        for proc_pid in self.process_list:
            if proc_pid:
                try:
                    os.kill(proc_pid, signal.SIGTERM)
                except Exception as e:
                    self.logger.log("error", "%s" % (str(e)))
        # Stop the reactor and signal shutdown
        if self._reactor.is_running():
            self._reactor.stop()
            self._reactor.cancel_all()
        self._stop_requested.set()
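A hedged usage sketch for wiring the controller together. FriSpyConfig and FriSpyLogger are hypothetical stand-ins for whatever supplies the get_*() accessors and the log()/log_to_gui() methods used above, and the watch-list entries merely follow the "<parent>_<libname>" naming that prepare_script() splits on; none of this is a documented entry point.

# Hypothetical driver, assuming FriSpyConfig / FriSpyLogger implement the
# interfaces the controller relies on (both names are placeholders).
if __name__ == "__main__":
    config = FriSpyConfig("frispy.ini")    # assumed config loader
    logger_obj = FriSpyLogger(config)      # assumed logger wrapper
    api_watch_list = ["kernel32_fileapi", "advapi32_registry"]  # illustrative entries

    app = FriSpyControllerApp(config, logger_obj, api_watch_list)
    if not app.prepare_script():           # assemble and persist the Frida script
        sys.exit(1)
    app.run(sys.argv[1])                   # path of the executable to spawn and hook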