class Module:
    """Urban Performance base-calculus module.

    Computes footprint area/km2 and total population for a scenario, then
    fans out population-density and area-density averages over MMU id
    ranges using worker threads.  Relies on module-level imports (psycopg2,
    time, randint, math, threading, Django Min/Max/transaction, Indicator,
    LogEvents, vacuum, mmu ORM model) visible elsewhere in the file.
    """

    def __init__(self, user, scenario, extra_dict_arguments=None):
        # Identifiers threaded through every query and log entry.
        self.__user = user
        self.__scenario = scenario
        # NOTE(review): raises TypeError/KeyError if extra_dict_arguments is
        # None or lacks "base_scenario" despite the None default — callers
        # are expected to always pass it.
        self.__base_scenario = extra_dict_arguments["base_scenario"]

    def run(self):
        """Entry point: run base calculations, then threaded density passes."""
        self.__general_calculus()
        # MMU primary-key bounds, filled in by __mmu_limit_offset below.
        self.__limits = {"inferior": 0, "superior": 0}
        self.__mmu_limit_offset()
        self.__pop_density_threads()
        self.__area_density_threads()

    def __general_calculus(self):
        # Fresh Indicator per run; vacuum the mmu table before heavy writes.
        self.__Indicator = Indicator(self.__user)
        vacuum(self.__Indicator.get_uri(), "mmu")
        self.__urbper_base_footprint_area()
        self.__urbper_base_total_population()
        self.__urbper_base_footprint_km2()

    def __urbper_base_footprint_area(self):
        """Call urbper_indicator_footprints_area(scenario), retrying up to 3x."""
        try:
            error = True
            count = 0
            while error and count < 3:
                try:
                    self.__Indicator = Indicator(self.__user)
                    query = """select urbper_indicator_footprints_area({scenario})""".format(
                        scenario=self.__scenario)
                    LogEvents("footprints area",
                              "footprints area module started: " + query,
                              self.__scenario, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    # Isolation level 0 = autocommit for the server-side function.
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    # NOTE(review): if psycopg2.connect itself failed, `conn`
                    # is unbound here and this raises UnboundLocalError,
                    # which escapes to the outer handler — verify.
                    conn.rollback()
                    conn.close()
                    error = True
                    count += 1
                    # Randomized backoff before retrying.
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "footprints area",
                        "footprints area module failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user, True)
                else:
                    error = False
                    conn.close()
                    LogEvents("footprints area",
                              "footprints area module finished",
                              self.__scenario, self.__user)
        except Exception as e:
            LogEvents("footprints area", "unknown error+ " + str(e),
                      self.__scenario, self.__user, True)

    def __urbper_base_footprint_km2(self):
        """Call urbper_indicator_footprint_km2(scenario), retrying up to 3x."""
        try:
            error = True
            count = 0
            while error and count < 3:
                try:
                    self.__Indicator = Indicator(self.__user)
                    query = """select urbper_indicator_footprint_km2({scenario})""".format(
                        scenario=self.__scenario)
                    # NOTE(review): "started" log uses the "footprints area"
                    # tag while failure/finish use "footprint_km2" — looks
                    # like a copy-paste label; confirm intent.
                    LogEvents("footprints area",
                              "footprints area module started: " + query,
                              self.__scenario, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    # NOTE(review): same unbound-`conn` risk as footprint_area.
                    conn.rollback()
                    conn.close()
                    error = True
                    count += 1
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "footprint_km2",
                        "footprint_km2 module failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user, True)
                else:
                    error = False
                    conn.close()
                    LogEvents("footprint_km2",
                              "footprint_km2 module finished",
                              self.__scenario, self.__user)
        except Exception as e:
            LogEvents("footprint_km2", "unknown error+ " + str(e),
                      self.__scenario, self.__user, True)

    def __urbper_base_total_population(self):
        """Call urbper_indicator_base_calculus_total_population via the
        up-calculator connection inside a Django atomic block, retrying 3x."""
        try:
            error = True
            count = 0
            while error and count < 3:
                # NOTE(review): Indicator/db are created here and again inside
                # the atomic block; the first pair is immediately shadowed and
                # its connection is never closed — looks redundant.
                self.__Indicator = Indicator(self.__user)
                db = self.__Indicator.get_up_calculator_connection()
                try:
                    with transaction.atomic():
                        self.__Indicator = Indicator(self.__user)
                        db = self.__Indicator.get_up_calculator_connection()
                        query = """select urbper_indicator_base_calculus_total_population({scenario},{base_scenario})""".format(
                            scenario=self.__scenario,
                            base_scenario=self.__base_scenario)
                        LogEvents("total population",
                                  "total population module started: " + query,
                                  self.__scenario, self.__user)
                        db.execute(query)
                except Exception as e:
                    db.close()
                    error = True
                    count += 1
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "total population",
                        "total population module failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user, True)
                else:
                    error = False
                    db.close()
                    LogEvents("total population",
                              "total population module finished",
                              self.__scenario, self.__user)
        except Exception as e:
            LogEvents("total population", "unknown error " + str(e),
                      self.__scenario, self.__user, True)

    def __mmu_limit_offset(self):
        """Fill self.__limits with the min/max mmu_id for this scenario."""
        try:
            self.__Indicator = Indicator(self.__user)
            # NOTE(review): `db` is opened but the aggregates below go through
            # the Django ORM (`mmu.objects`), not this connection — presumably
            # kept only for symmetry with the other methods; verify.
            db = self.__Indicator.get_up_calculator_connection()
            try:
                self.__limits["inferior"] = mmu.objects.filter(
                    scenario_id=self.__scenario).aggregate(
                        Min('mmu_id'))["mmu_id__min"]
                self.__limits["superior"] = mmu.objects.filter(
                    scenario_id=self.__scenario).aggregate(
                        Max('mmu_id'))["mmu_id__max"]
            except Exception as e:
                LogEvents("squares max min", "unknown error " + str(e),
                          self.__scenario, self.__user, True)
            db.close()
        except Exception as e:
            LogEvents("squares max min", "unknown error " + str(e),
                      self.__scenario, self.__user, True)

    def __pop_density_threads(self):
        """Partition [inferior, superior] mmu_id range and run
        __ModulePopulationDensity in one thread per partition."""
        self.__scenario_t = {}
        self.__scenario_t["limit"] = 0
        self.__scenario_t["offset"] = 0
        inferior = self.__limits["inferior"]
        superior = self.__limits["superior"]
        _threads = {}
        # Thread count capped both by configuration and by row volume.
        # NOTE(review): if ceil((superior-inferior)/max_rows) is 0 the
        # division below raises ZeroDivisionError; also assumes limits are
        # not None (i.e. the mmu table is non-empty) — confirm upstream.
        self.max_threads = min(
            self.__Indicator.get_max_threads(),
            int(
                math.ceil(
                    (superior - inferior) / self.__Indicator.get_max_rows())))
        num_partitions = self.max_threads
        partition_size = (int)(math.ceil(
            (superior - inferior) / self.max_threads))
        for h in range(0, num_partitions):
            # Contiguous, non-overlapping id windows: [offset, limit].
            self.__scenario_t["offset"] = inferior
            self.__scenario_t["limit"] = self.__scenario_t["offset"] + \
                partition_size
            inferior = self.__scenario_t["limit"] + 1
            _threads[h] = threading.Thread(
                target=self.__ModulePopulationDensity,
                args=(self.__scenario, self.__scenario_t["offset"],
                      self.__scenario_t["limit"]))
        for process in _threads:
            _threads[process].start()
        for process in _threads:
            if _threads[process].is_alive():
                _threads[process].join()

    def __area_density_threads(self):
        """Same partitioning scheme as __pop_density_threads, but running
        __ModuleAreaDensity workers."""
        self.__scenario_t = {}
        self.__scenario_t["limit"] = 0
        self.__scenario_t["offset"] = 0
        inferior = self.__limits["inferior"]
        superior = self.__limits["superior"]
        _threads = {}
        # NOTE(review): same ZeroDivisionError / None-limits caveats as in
        # __pop_density_threads.
        self.max_threads = min(
            self.__Indicator.get_max_threads(),
            int(
                math.ceil(
                    (superior - inferior) / self.__Indicator.get_max_rows())))
        num_partitions = self.max_threads
        partition_size = (int)(math.ceil(
            (superior - inferior) / self.max_threads))
        for h in range(0, num_partitions):
            self.__scenario_t["offset"] = inferior
            self.__scenario_t["limit"] = self.__scenario_t["offset"] + \
                partition_size
            inferior = self.__scenario_t["limit"] + 1
            _threads[h] = threading.Thread(target=self.__ModuleAreaDensity,
                                           args=(self.__scenario,
                                                 self.__scenario_t["offset"],
                                                 self.__scenario_t["limit"]))
        for process in _threads:
            _threads[process].start()
        for process in _threads:
            if _threads[process].is_alive():
                _threads[process].join()

    def __ModuleAreaDensity(self, scenario_id, offset=0, limit=0):
        """Thread worker: urbper_indicator_area_den_avg over one id window,
        retried up to 3 times."""
        try:
            error = True
            count = 0
            while error and count < 3:
                self.__Indicator = Indicator(self.__user)
                # NOTE(review): `db` is acquired but never used or closed in
                # this worker — likely a connection leak per retry; verify.
                db = self.__Indicator.get_up_calculator_connection()
                try:
                    query = """select urbper_indicator_area_den_avg({scenario},{offset},{limit})""".format(
                        scenario=scenario_id, offset=offset, limit=limit)
                    LogEvents("area density avg",
                              "area density avg module started: " + query,
                              scenario_id, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    error = True
                    count += 1
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "area density avg",
                        "area density avg module failed " + str(count) + ": " + str(e),
                        scenario_id, self.__user, True)
                    # NOTE(review): unbound if psycopg2.connect failed.
                    conn.close()
                else:
                    error = False
                    LogEvents("area density avg",
                              "area density avg module finished",
                              scenario_id, self.__user)
                    conn.close()
        except Exception as e:
            LogEvents("Running scenarios", "Unknown error " + str(e),
                      scenario_id, self.__user, True)

    def __ModulePopulationDensity(self, scenario_id, offset=0, limit=0):
        """Thread worker: urbper_indicator_pop_den_avg over one id window,
        retried up to 3 times."""
        try:
            error = True
            count = 0
            while error and count < 3:
                self.__Indicator = Indicator(self.__user)
                # NOTE(review): `db` unused and never closed, as in
                # __ModuleAreaDensity.
                db = self.__Indicator.get_up_calculator_connection()
                try:
                    query = """select urbper_indicator_pop_den_avg({scenario},{offset},{limit})""".format(
                        scenario=scenario_id, offset=offset, limit=limit)
                    LogEvents(
                        "population density avg",
                        "population density avg module started: " + query,
                        scenario_id, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    error = True
                    count += 1
                    # NOTE(review): unlike the sibling workers there is no
                    # backoff sleep here before retrying — confirm intent.
                    LogEvents(
                        "population density avg",
                        "population density avg module failed " + str(count) + ": " + str(e),
                        scenario_id, self.__user, True)
                    conn.close()
                else:
                    error = False
                    LogEvents("population density avg",
                              "population density avg module finished",
                              scenario_id, self.__user)
                    conn.close()
        except Exception as e:
            LogEvents("Running scenarios", "Unknown error " + str(e),
                      scenario_id, self.__user, True)
class Module:
    """Urban Performance module: clips MMU cells to the scenario footprint
    (political_boundary / study_area) and then builds the road buffers.

    Interface (unchanged): Module(user, scenario, extra_dict_arguments)
    where extra_dict_arguments["base_scenario"] is required, then run().
    """

    def __init__(self, user, scenario, extra_dict_arguments=None):
        # Identifiers used by every query and log entry.
        self.__user = user
        self.__scenario = scenario
        # As in the rest of the module family, base_scenario is mandatory
        # even though the parameter defaults to None.
        self.__base_scenario = extra_dict_arguments["base_scenario"]

    def run(self):
        """Delete mmu_info/mmu rows lying outside the scenario footprint,
        then compute road buffers.

        Raises whatever the up-calculator connection raises on failure; the
        connection is now always closed (the original leaked it if
        execute() raised).
        """
        # Scenario id is interpolated into SQL; it is produced internally as
        # an int, but NOTE(review): parameterized queries would be safer.
        query = """
            delete from mmu_info where mmu_id in (
                select mmu_id from mmu
                where scenario_id={scenario} and not st_contains(
                    (
                        select location from footprint
                        inner join classification on classification.name=footprint.name
                        where classification.category='footprint'
                        and classification.fclass in ('political_boundary','study_area')
                        and scenario_id={scenario}
                    ),mmu.location
                )
            );
            delete from mmu
            where scenario_id={scenario} and not st_contains(
                (
                    select location from footprint
                    inner join classification on classification.name=footprint.name
                    where classification.category='footprint'
                    and classification.fclass in ('political_boundary','study_area')
                    and scenario_id={scenario}
                ),mmu.location
            );
        """.format(scenario=self.__scenario)
        self.__Indicator = Indicator(self.__user)
        db = self.__Indicator.get_up_calculator_connection()
        try:
            db.execute(query)
        finally:
            # Fix: close even when execute() raises (original leaked here).
            db.close()
        self.__evaluateRoadsBuffers()

    def __evaluateRoadsBuffers(self):
        """Run urbper_buffer_roads(scenario) with up to 3 retries.

        Fixes over the original:
        - ``conn`` is initialized to None and closed in a ``finally`` on
          every iteration; the original never closed it (leaked one
          connection per attempt) and would have hit UnboundLocalError if
          ``psycopg2.connect`` itself failed.
        """
        import psycopg2
        import psycopg2.extras
        try:
            error = True
            count = 0
            while error and count < 3:
                conn = None
                try:
                    query = """ select urbper_buffer_roads({scenario}) """.format(
                        scenario=self.__scenario)
                    LogEvents("roads buffers",
                              "roads buffer module started: " + query,
                              self.__scenario, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    # Isolation level 0 = autocommit, needed for the
                    # long-running server-side function.
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    error = True
                    count += 1
                    # Randomized backoff before the next attempt.
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "roads buffers",
                        "roads buffers module failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user)
                else:
                    error = False
                    LogEvents("roads buffers",
                              "roads buffers module finished",
                              self.__scenario, self.__user)
                finally:
                    # Fix: release the connection on success AND failure.
                    if conn is not None:
                        conn.close()
        except Exception as e:
            LogEvents("roads buffers", "unknown error " + str(e),
                      self.__scenario, self.__user)
class Module:
    """Urban Performance module: population-proximity-to-amenities indicator.

    For every distinct amenity class configured for the scenario, calls the
    server-side urbper_indicator_pop_amenity_prox function.  Depends on
    module-level imports visible elsewhere in the file (Indicator,
    LogEvents, vacuum, transaction, connection, time, randint).
    """

    def __init__(self, user, scenario, extra_dict_arguments=None):
        # Unlike the sibling modules, this one does not read base_scenario.
        self.__user = user
        self.__scenario = scenario

    def run(self):
        """Vacuum mmu, fetch the amenity classes, then run the proximity
        indicator for each class with up to 3 retries for the whole batch."""
        try:
            self.__Indicator = Indicator(self.__user)
            self.__db = self.__Indicator.get_up_calculator_connection()
            vacuum(self.__Indicator.get_uri(), "mmu")
            self.__db.close()
            # NOTE(review): may be None if every attempt in
            # __getAmentityClassess failed; the for-loop below would then
            # raise and burn the 3 retries — verify that's acceptable.
            amenity_classes_set = self.__getAmentityClassess()
            error = True
            count = 0
            while error and count < 3:
                self.__Indicator = Indicator(self.__user)
                db = self.__Indicator.get_up_calculator_connection()
                try:
                    for amenity in amenity_classes_set:
                        amenity_classes = []
                        amenity_classes.append(amenity)
                        # Postgres array literal, e.g. '{school}'.
                        amenity_classes_array = "'{" + ",".join(
                            amenity_classes) + "}'"
                        query = """
                        select urbper_indicator_pop_amenity_prox({scenario},'pop_prox_{fclass}'::varchar(30),'{fclass}_proximity'::varchar(30),{fclass_array})
                        """.format(scenario=self.__scenario,
                                   fclass=amenity,
                                   fclass_array=amenity_classes_array)
                        LogEvents(
                            amenity + " proximity",
                            amenity + " proximity module started: " + query,
                            self.__scenario, self.__user)
                        # Each class runs in its own transaction.
                        with transaction.atomic():
                            db.execute(query)
                except Exception as e:
                    error = True
                    count += 1
                    time.sleep(randint(1, 3))
                    db.close()
                    LogEvents(
                        "amenity proximity",
                        "amenity proximity module failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user)
                else:
                    error = False
                    db.close()
                    LogEvents("amenity proximity",
                              "amenity proximity module finished",
                              self.__scenario, self.__user)
        except Exception as e:
            LogEvents("amenity proximity", "unknown error " + str(e),
                      self.__scenario, self.__user)

    def __getAmentityClassess(self):
        """Return the distinct amenity class names configured for this
        scenario (list of strings), retrying the query up to 3 times.

        Returns None (via the outer handler) if all attempts fail.
        """
        try:
            error = True
            count = 0
            while error and count < 3:
                try:
                    with connection.cursor() as cursor:
                        query = """select distinct assumptions.name from amenities inner join classification on classification."name" = amenities.fclass inner join assumptions on assumptions.name = classification.fclass where classification.category='amenities' and amenities.scenario_id=assumptions.scenario_id and assumptions.scenario_id={}""".format(
                            self.__scenario)
                        LogEvents(
                            "amenity proximity",
                            "amenity proximity classes started: " + query,
                            self.__scenario, self.__user)
                        cursor.execute(query)
                        # First column of each row = class name.
                        results_set = [
                            list(row)[0] for row in cursor.fetchall()
                        ]
                        print(results_set)
                        results = results_set
                except Exception as e:
                    error = True
                    count += 1
                    time.sleep(randint(1, 3))
                    LogEvents(
                        "amenity proximity",
                        "amenity proximity classes failed " + str(count) + ": " + str(e),
                        self.__scenario, self.__user)
                else:
                    error = False
                    LogEvents("amenity proximity",
                              "amenity proximity classes finished",
                              self.__scenario, self.__user)
            # NOTE(review): if all 3 attempts failed, `results` is unbound
            # here; the NameError is swallowed by the outer except and the
            # method returns None — confirm callers tolerate that.
            return results
        except Exception as e:
            LogEvents("amenity proximity", "unknown error " + str(e),
                      self.__scenario, self.__user)
class Module:
    """Urban Performance module: job buffers and job density.

    Clips MMU cells to the footprint, then partitions the jobs table by
    primary-key range and runs urbper_buffer_job_and_job_density in one
    thread per partition.  Depends on module-level imports (Indicator,
    LogEvents, jobs ORM model, Min/Max, math, threading, time, randint).
    """

    def __init__(self, user, scenario, extra_dict_arguments=None):
        self.__user = user
        self.__scenario = scenario
        # Mandatory despite the None default, as in the sibling modules.
        self.__base_scenario = extra_dict_arguments["base_scenario"]

    def run(self):
        """Delete mmu_info/mmu rows outside the footprint, then evaluate
        job buffers."""
        # NOTE(review): unlike the footprint subquery, these DELETEs do not
        # filter mmu rows by scenario_id — they may touch rows of every
        # scenario whose cells fall outside THIS scenario's footprint;
        # confirm that is intended.
        query = """
            delete from mmu_info where mmu_id in (
                select mmu_id from mmu
                where not st_contains(
                    (
                        select location from footprint
                        inner join classification on classification.name=footprint.name
                        where classification.category='footprint'
                        and classification.fclass in ('political_boundary','study_area')
                        and scenario_id={scenario}
                    ),mmu.location
                )
            );
            delete from mmu
            where not st_contains(
                (
                    select location from footprint
                    inner join classification on classification.name=footprint.name
                    where classification.category='footprint'
                    and classification.fclass in ('political_boundary','study_area')
                    and scenario_id={scenario}
                ),mmu.location
            );
        """.format(scenario=self.__scenario)
        self.__Indicator = Indicator(self.__user)
        db = self.__Indicator.get_up_calculator_connection()
        db.execute(query)
        db.close()
        self.__EvaluateJobsBuffer()

    def __EvaluateJobsBuffer(self):
        # Jobs primary-key bounds, filled in by __jobs_limit_offset.
        self.__limits = {"inferior": 0, "superior": 0}
        self.__jobs_limit_offset()
        self.__jobs_preprocessing_threads()

    def __jobs_limit_offset(self):
        """Fill self.__limits with the min/max jobs_id for this scenario."""
        try:
            try:
                # get the max an min of pk
                self.__limits["inferior"] = jobs.objects.filter(
                    scenario_id=self.__scenario).aggregate(
                        Min('jobs_id'))["jobs_id__min"]
                self.__limits["superior"] = jobs.objects.filter(
                    scenario_id=self.__scenario).aggregate(
                        Max('jobs_id'))["jobs_id__max"]
            except Exception as e:
                LogEvents("jobs max min", "unknown error " + str(e),
                          self.__scenario, self.__user, True)
        except Exception as e:
            LogEvents("jobs max min", "unknown error " + str(e),
                      self.__scenario, self.__user, True)

    def __jobs_preprocessing_threads(self):
        """Partition the jobs_id range and run __ModuleJobsDensity in one
        thread per partition (same scheme as the density modules)."""
        self.__scenario_t = {}
        self.__scenario_t["limit"] = 0
        self.__scenario_t["offset"] = 0
        inferior = self.__limits["inferior"]
        superior = self.__limits["superior"]
        _threads = {}
        # NOTE(review): uses self.__Indicator set in run(); if the thread
        # cap evaluates to 0 the division below raises ZeroDivisionError,
        # and None limits (empty jobs table) would fail earlier — verify.
        self.max_threads = min(
            self.__Indicator.get_max_threads(),
            int(
                math.ceil(
                    (superior - inferior) / self.__Indicator.get_max_rows())))
        num_partitions = self.max_threads
        partition_size = (int)(math.ceil(
            (superior - inferior) / self.max_threads))
        for h in range(0, num_partitions):
            self.__scenario_t["offset"] = inferior
            self.__scenario_t["limit"] = self.__scenario_t["offset"] + \
                partition_size
            inferior = self.__scenario_t["limit"] + 1
            _threads[h] = threading.Thread(target=self.__ModuleJobsDensity,
                                           args=(self.__scenario,
                                                 self.__scenario_t["offset"],
                                                 self.__scenario_t["limit"]))
        for process in _threads:
            _threads[process].start()
        for process in _threads:
            if _threads[process].is_alive():
                _threads[process].join()

    def __ModuleJobsDensity(self, scenario_id, offset=0, limit=0):
        """Thread worker: urbper_buffer_job_and_job_density over one jobs_id
        window, retried up to 3 times."""
        import psycopg2
        import psycopg2.extras
        try:
            error = True
            count = 0
            while error and count < 3:
                try:
                    query = """select urbper_buffer_job_and_job_density({scenario},{offset},{limit})""".format(
                        scenario=scenario_id, offset=offset, limit=limit)
                    LogEvents(
                        "job buffers and density",
                        "job buffers and density module started: " + query,
                        scenario_id, self.__user)
                    conn = psycopg2.connect(self.__Indicator.get_uri())
                    cursor = conn.cursor(
                        cursor_factory=psycopg2.extras.DictCursor)
                    old_isolation_level = conn.isolation_level
                    conn.set_isolation_level(0)
                    cursor.execute(query)
                    conn.commit()
                    conn.set_isolation_level(old_isolation_level)
                except Exception as e:
                    error = True
                    count += 1
                    # NOTE(review): no backoff sleep here, unlike the other
                    # retry loops; `conn` is also never closed in this
                    # worker (leak per attempt) — verify.
                    LogEvents(
                        "job buffers and density ",
                        "job buffers and density module failed " + str(count) + ": " + str(e),
                        scenario_id, self.__user, True)
                else:
                    error = False
                    # db.commit()
                    LogEvents("job buffers and density ",
                              "job buffers and density module finished",
                              scenario_id, self.__user)
        except Exception as e:
            LogEvents("Running scenarios", "Unknown error " + str(e),
                      scenario_id, self.__user, True)
class EvaluateScenario:
    """
    Urban Performance scenario evaluation

    Attributes:
        user: user id.
        scenarios: List of scenarios to be evaluated.
        indicators: List of indicators to be evaluated.
    """

    def __init__(self, user, scenarios, indicators):
        """Parse the '_'-separated scenario and indicator lists and order
        the indicator modules by their configured execution order.

        Fix over the original: the base scenario is moved to the end of the
        processing list with a single index() lookup — the original nested
        ``self.scenarios.index(self.scenarios.index(...))`` which popped the
        wrong element (or raised ValueError) whenever the base scenario's
        list position differed from its id.
        """
        LogEvents("Start scenario", "Starting all scenarios proccesing", -1,
                  1)
        self.user = user
        self.indicator = Indicator(self.user)
        if scenarios != "":
            self.scenarios = [
                int(scenario.strip()) for scenario in scenarios.split('_')
            ]
            self.base_scenario = self.__get_base_scenario(self.scenarios[0])
            if self.base_scenario in self.scenarios:
                if len(self.scenarios) > 1:
                    # Move the base scenario to the end so it is evaluated
                    # last (fixed: single .index() call).
                    self.scenarios.append(
                        self.scenarios.pop(
                            self.scenarios.index(self.base_scenario)))
        if indicators != "":
            # Keep only the requested modules, re-ordered by Modules.order.
            indicator_param = [s.strip() for s in indicators.split('_')]
            order = Modules.objects.filter(
                module__in=indicator_param).values("module").order_by('order')
            self.indicators = [module_in["module"] for module_in in order]
        # Per-layer copy bookkeeping used elsewhere in the pipeline.
        self.copiados = dict(buffers=True,
                             policies=True,
                             amenities=[],
                             transit=[],
                             cycle=[],
                             footprint=[],
                             roads=[])

    """
    run_scenarios method executes all the modules that the user's role has
    access to.
    """

    def run_scenarios(self):
        """Evaluate every scenario with every ordered indicator module, then
        vacuum the database."""
        try:
            LogEvents("Start scenario", "Starting all scenarios proccesing",
                      self.base_scenario, self.user)
            last = 0
            # Evaluate the scenario(s)
            for scenario in self.scenarios:
                last = scenario
                LogEvents(
                    "Start scenario",
                    "The modules to be use are: " + str(",".join(
                        self.indicators)), scenario, self.user)
                ##########################################
                for module in self.indicators:
                    # Modules live at plup.indicators.<name>.<name>.
                    module_r = "plup.indicators." + module + "." + module
                    try:
                        plugin = importlib.import_module(module_r, ".")
                        # Fixed: do not shadow the loop variable `module`
                        # with the instantiated plugin object.
                        module_instance = plugin.Module(
                            self.user, scenario,
                            dict(base_scenario=self.base_scenario))
                        module_instance.run()
                    except Exception as e:
                        # Best-effort: a failing module must not abort the
                        # remaining modules/scenarios.
                        print("E", e)
                LogEvents("Finish scenario", "The scenario have been processed",
                          scenario, self.user)
            db = config.get_db()
            vacuum(self.indicator.get_uri(), "mmu")
            db.close()
            db = config.get_db()
            vacuum(self.indicator.get_uri(), True)
            db.close()
            LogEvents("All scenarios Finished",
                      "All scenarios have been processed", last, self.user)
        except Exception as e:
            LogEvents("Unknown error", str(e), self.base_scenario, self.user)

    """
    __get_base_scenario method finds the base scenario that were created for
    the provided city and country
    """

    def __get_base_scenario(self, scenario):
        """Return the id of the base scenario sharing the given scenario's
        study-area footprint, or -1 on failure."""
        scenario_id = -1
        try:
            db = config.get_db()
            try:
                query = """
                    select scenario_id from scenario ,(
                        select location from scenario
                        inner join footprint using (scenario_id)
                        where footprint.name='study_area'
                        and scenario_id={scenario}
                    ) pb
                    where is_base=1 and st_equals(pb.location, location)
                """.format(scenario=scenario)
                db.execute(query)
                scenarios = db.fetchall()
                scenario_id = scenarios[0][0]
                db.close()
                return scenario_id
            except Exception as e:
                LogEvents("An error happend while getting the base scenario",
                          str(e), -1, sys.argv[1])
                db.close()
        except Exception as e:
            LogEvents("Unknown error", str(e), -1, sys.argv[1])
        return scenario_id

    """
    __get_all_scenarios method finds all the scenarios that were created ofr
    the provided city and country
    """

    def get_base_scenario(self):
        """Public accessor for the base scenario id.

        Fix over the original: it called the private method with no
        argument, which always raised TypeError (the private method
        requires a scenario id).  Now it looks up from the first parsed
        scenario, matching __init__'s behavior.
        """
        return self.__get_base_scenario(self.scenarios[0])