Ejemplo n.º 1
0
    def _test_generate_results(self, indicator_name, dataset_name, expression,
                               source):
        """Check that *expression* can be computed against the base year data.

        Looks up the 'base_year_data' run in the project, configures the
        simulation/session state for its end year, pulls *dataset_name* from
        the dataset pool (sub-sampling interaction datasets), and attempts to
        compute the indicator expression.

        Returns:
            (True, None) on success, or (False, error_string) where the
            error string contains the exception plus a formatted traceback,
            or a message when no base year data exists.
        """
        # grab the first base_year_data in results_manager/simulation_runs and
        # fetch the year for it
        base_year = self.project.find(
            "results_manager/simulation_runs/run[@name='base_year_data']/end_year"
        )
        if base_year is None:
            return False, "Project doesn't have any base year data to check against"

        start_year = int(base_year.text)
        result_generator = OpusResultGenerator(self.project)
        result_generator.set_data(source_data_name='base_year_data',
                                  indicator_name=indicator_name,
                                  dataset_name=dataset_name,
                                  years=[
                                      start_year,
                                  ],
                                  indicator_definition=(expression, source))

        interface = IndicatorFrameworkInterface(self.project)
        src_data = interface.get_source_data(source_data_name='base_year_data',
                                             years=[
                                                 start_year,
                                             ])
        # Point the simulation state at the base year cache before computing.
        SimulationState().set_current_time(start_year)
        SimulationState().set_cache_directory(src_data.cache_directory)
        SessionConfiguration(
            new_instance=True,
            package_order=src_data.dataset_pool_configuration.package_order,
            in_storage=AttributeCache())

        dataset = SessionConfiguration().get_dataset_from_pool(dataset_name)
        if isinstance(dataset, InteractionDataset):
            # create a random 100-element subset if it's an interaction
            # dataset, so the test computation stays cheap
            dataset_arguments = {
                'index1':
                numpy.random.randint(0, dataset.dataset1.size(), size=100),
                'index2':
                numpy.random.randint(0, dataset.dataset2.size(), size=100)
            }
            SessionConfiguration().delete_datasets()
            dataset = SessionConfiguration().get_dataset_from_pool(
                dataset_name, dataset_arguments=dataset_arguments)

        try:
            dataset.compute_variables(names=[expression])
            return True, None
        except Exception as e:
            # 'except Exception, e' is Python-2-only syntax; 'as e' works on
            # Python 2.6+ and 3.x alike.  Use non-shadowing names instead of
            # clobbering the builtin 'type'.
            exc_type, exc_value, exc_tb = sys.exc_info()
            stack_dump = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_tb))
            errors = "{}\n\n{}".format(e, stack_dump)
            return False, errors
Ejemplo n.º 2
0
    def _test_generate_results(self, indicator_name, dataset_name, expression, source):
        """Verify that *expression* computes successfully against base year data.

        Configures the simulation/session state for the 'base_year_data' run's
        end year, fetches *dataset_name* from the pool (sub-sampling
        interaction datasets), and tries to compute the expression.

        Returns (True, None) on success, or (False, error_string) where the
        error string includes the exception and a formatted traceback.
        """
        # grab the first base_year_data in results_manager/simulation_runs and
        # fetch the year for it
        base_year = self.project.find("results_manager/simulation_runs/run[@name='base_year_data']/end_year")
        if base_year is None:
            return False, "Project doesn't have any base year data to check against"

        start_year = int(base_year.text)
        result_generator = OpusResultGenerator(self.project)
        result_generator.set_data(
               source_data_name = 'base_year_data',
               indicator_name = indicator_name,
               dataset_name = dataset_name,
               years = [start_year,],
               indicator_definition = (expression, source))

        interface = IndicatorFrameworkInterface(self.project)
        src_data = interface.get_source_data(source_data_name = 'base_year_data', years = [start_year,])
        # Point the simulation state at the base year cache before computing.
        SimulationState().set_current_time(start_year)
        SimulationState().set_cache_directory(src_data.cache_directory)
        SessionConfiguration(
            new_instance = True,
            package_order = src_data.dataset_pool_configuration.package_order,
            in_storage = AttributeCache())


        dataset = SessionConfiguration().get_dataset_from_pool(dataset_name)
        if isinstance(dataset,InteractionDataset):
            # create a random 100-element subset if it's an interaction
            # dataset, keeping the test computation cheap
            dataset_arguments = {
                 'index1':numpy.random.randint(0,dataset.dataset1.size(), size=100),
                 'index2':numpy.random.randint(0,dataset.dataset2.size(), size=100)
            }
            SessionConfiguration().delete_datasets()
            dataset = SessionConfiguration().get_dataset_from_pool(dataset_name,
                                                                   dataset_arguments = dataset_arguments)

        try:
            dataset.compute_variables(names = [expression])
            return True, None
        except Exception as e:
            # 'except Exception, e' is Python-2-only syntax; 'as e' is
            # compatible with both Python 2.6+ and Python 3.
            # Avoid shadowing the builtin 'type' while formatting the trace.
            exc_type, exc_value, exc_tb = sys.exc_info()
            stack_dump = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
            errors = "{}\n\n{}".format(e, stack_dump)
            return False, errors
 def __init__(self, project, ignore_cache=False):
     """Initialize result-generation state for *project*.

     ignore_cache: when True, cached indicator data is bypassed.
     """
     self.project = project
     self.ignore_cache = ignore_cache
     self.interface = IndicatorFrameworkInterface(project)
     self.computed_indicators = []
     self.cache_directory = None
     self.firstRead = True
     self.guiElement = None
     # Dummy callbacks until real ones are registered.
     self.finishedCallback = lambda x: ()
     self.errorCallback = lambda x: ()
Ejemplo n.º 4
0
    def on_pb_urbancanvas_clicked(self):
        """Export the currently selected parcel indicator for UrbanCanvas.

        Computes the indicator expression over the 'parcel' dataset for the
        selected run/year and writes rows with positive values to
        'results_browser_indicator.csv' in the project data directory.
        Indicators on any other dataset only get an informational message box.
        """
        run_name = self.current_run
        indicator_name = self.current_indicator
        indicator_dataset = self.current_indicator_dataset
        if indicator_dataset != 'parcel':
            MessageBox.information(mainwindow = self, text = 'Not a parcel variable. Only parcel variables can be sent to UrbanCanvas')
        else:
            start_year = int(self.current_year)
            end_year = start_year

            # Nothing fully selected yet -- silently bail out.
            if run_name is None or indicator_name is None or start_year is None:
                return

            key = (run_name, indicator_name, start_year)

            self.pb_urbancanvas.setText('Sending to UrbanCanvas...')

            indicator_nodes = get_available_indicator_nodes(self.project)

            # Find the indicator node matching the selected name/dataset.
            # Note: 'name' stays bound after the break and is reused below
            # for the expression-library lookup.
            dataset = None
            for indicator_node in indicator_nodes:
                ind_dataset, name = get_variable_dataset_and_name(indicator_node)
                if name == indicator_name and ind_dataset == indicator_dataset:
                    dataset = ind_dataset
                    break

            if dataset is None:
                raise Exception('Could not find dataset for indicator %s' % indicator_name)

            # NOTE(review): table_params is built but never used in this
            # method -- confirm whether it can be removed.
            table_params = {
                'name': None,
                'output_type' : 'tab',
                'indicators' : [indicator_name],
            }
            expression_library = self.project.xml_config.get_expression_library()
            expression = expression_library[(dataset,name)]
            logger.log_note(expression)

            base_year = end_year
            project_name = self.project.name
            opus_data_path = self.project.xml_config.get_opus_data_path()
            logger.log_note(base_year)
            logger.log_note(project_name)
            logger.log_note(opus_data_path)
            # Locate the cached simulation data for the requested run/year
            # and build a dataset pool over its flt storage.
            interface = IndicatorFrameworkInterface(self.project)
            source_data = interface.get_source_data(
                                 source_data_name = run_name,
                                 years = [end_year,]
            )
            cache = os.path.join(source_data.cache_directory,str(end_year))
            logger.log_note(cache)
            storage = StorageFactory().get_storage('flt_storage',storage_location=cache)
            dataset_pool = DatasetPool(storage=storage, package_order=[project_name,'urbansim_parcel','urbansim','opus_core'])
            parcels = dataset_pool.get_dataset('parcel')
            parcel_ids = pd.Series(parcels.get_attribute('parcel_id'))
            values = pd.Series(parcels.compute_variables([expression],dataset_pool=dataset_pool).astype('float'))
            # 'parcels' is rebound from the Opus dataset to a pandas frame.
            parcels = pd.DataFrame({"parcel_id":parcel_ids,"vl_values":values})
            #parcels.set_index(keys='parcel_id',inplace=True)
            #parcels["vl_values"][parcels["vl_values"]==0] = np.nan
            # Keep only parcels with a positive indicator value.
            parcels = parcels[parcels["vl_values"]>0]

            # NOTE(review): os.chdir changes the process-wide working
            # directory as a side effect -- confirm callers tolerate this.
            os.chdir(os.path.join(opus_data_path,project_name))
            parcels.to_csv('results_browser_indicator.csv',index=False)
            #np.savez('results_browser_indicator',parcel_id=parcels.vl_values.index.values.astype('int32'),values=parcels.vl_values.values.astype('int32'))

            ##############UNCOMMENT IF WEBSERVICE IS DESIRED
            # parcels.save('variable_library.pkl') ##I believe 'save' was just deprectated in pandas- its now to_pickle or some such thing... change this later
            # web_service_path = os.path.join(os.getenv("OPUS_HOME"),'src',project_name,'scripts','web_service.py')
            # logger.log_note(web_service_path)
            # p = subprocess.Popen([sys.executable,web_service_path])
            # MessageBox.information(mainwindow = self, text = 'Click OK when done viewing in UrbanCanvas')
            # p.kill()
            # self.pb_urbancanvas.setText('View in UrbanCanvas')

            MessageBox.information(mainwindow = self, text = 'Variable exported to the project data directory for viewing in UrbanCanvas')
            self.pb_urbancanvas.setText('View in UrbanCanvas')
Ejemplo n.º 5
0
    def on_pb_urbancanvas_clicked(self):
        """Send the selected parcel indicator to UrbanCanvas via CSV export.

        For indicators on the 'parcel' dataset, evaluates the indicator
        expression for the selected run/year and writes parcels with
        positive values to 'results_browser_indicator.csv' under the project
        data directory; other datasets just trigger an informational dialog.
        """
        run_name = self.current_run
        indicator_name = self.current_indicator
        indicator_dataset = self.current_indicator_dataset
        if indicator_dataset != 'parcel':
            MessageBox.information(
                mainwindow=self,
                text=
                'Not a parcel variable. Only parcel variables can be sent to UrbanCanvas'
            )
        else:
            start_year = int(self.current_year)
            end_year = start_year

            # Incomplete selection -- nothing to export.
            if run_name is None or indicator_name is None or start_year is None:
                return

            key = (run_name, indicator_name, start_year)

            self.pb_urbancanvas.setText('Sending to UrbanCanvas...')

            indicator_nodes = get_available_indicator_nodes(self.project)

            # Locate the indicator node for the selected name/dataset; the
            # loop variable 'name' is reused after the break for the
            # expression-library lookup.
            dataset = None
            for indicator_node in indicator_nodes:
                ind_dataset, name = get_variable_dataset_and_name(
                    indicator_node)
                if name == indicator_name and ind_dataset == indicator_dataset:
                    dataset = ind_dataset
                    break

            if dataset is None:
                raise Exception('Could not find dataset for indicator %s' %
                                indicator_name)

            # NOTE(review): table_params appears unused in this method --
            # confirm whether it can be dropped.
            table_params = {
                'name': None,
                'output_type': 'tab',
                'indicators': [indicator_name],
            }
            expression_library = self.project.xml_config.get_expression_library(
            )
            expression = expression_library[(dataset, name)]
            logger.log_note(expression)

            base_year = end_year
            project_name = self.project.name
            opus_data_path = self.project.xml_config.get_opus_data_path()
            logger.log_note(base_year)
            logger.log_note(project_name)
            logger.log_note(opus_data_path)
            # Resolve the cache directory for the run/year and build a
            # dataset pool over its flt storage.
            interface = IndicatorFrameworkInterface(self.project)
            source_data = interface.get_source_data(source_data_name=run_name,
                                                    years=[
                                                        end_year,
                                                    ])
            cache = os.path.join(source_data.cache_directory, str(end_year))
            logger.log_note(cache)
            storage = StorageFactory().get_storage('flt_storage',
                                                   storage_location=cache)
            dataset_pool = DatasetPool(storage=storage,
                                       package_order=[
                                           project_name, 'urbansim_parcel',
                                           'urbansim', 'opus_core'
                                       ])
            parcels = dataset_pool.get_dataset('parcel')
            parcel_ids = pd.Series(parcels.get_attribute('parcel_id'))
            values = pd.Series(
                parcels.compute_variables(
                    [expression], dataset_pool=dataset_pool).astype('float'))
            # 'parcels' is rebound from the Opus dataset to a pandas frame.
            parcels = pd.DataFrame({
                "parcel_id": parcel_ids,
                "vl_values": values
            })
            #parcels.set_index(keys='parcel_id',inplace=True)
            #parcels["vl_values"][parcels["vl_values"]==0] = np.nan
            # Only export parcels with a positive indicator value.
            parcels = parcels[parcels["vl_values"] > 0]

            # NOTE(review): os.chdir mutates the process-wide working
            # directory -- confirm nothing else depends on the old cwd.
            os.chdir(os.path.join(opus_data_path, project_name))
            parcels.to_csv('results_browser_indicator.csv', index=False)
            #np.savez('results_browser_indicator',parcel_id=parcels.vl_values.index.values.astype('int32'),values=parcels.vl_values.values.astype('int32'))

            ##############UNCOMMENT IF WEBSERVICE IS DESIRED
            # parcels.save('variable_library.pkl') ##I believe 'save' was just deprectated in pandas- its now to_pickle or some such thing... change this later
            # web_service_path = os.path.join(os.getenv("OPUS_HOME"),'src',project_name,'scripts','web_service.py')
            # logger.log_note(web_service_path)
            # p = subprocess.Popen([sys.executable,web_service_path])
            # MessageBox.information(mainwindow = self, text = 'Click OK when done viewing in UrbanCanvas')
            # p.kill()
            # self.pb_urbancanvas.setText('View in UrbanCanvas')

            MessageBox.information(
                mainwindow=self,
                text=
                'Variable exported to the project data directory for viewing in UrbanCanvas'
            )
            self.pb_urbancanvas.setText('View in UrbanCanvas')
    def _visualize(self, args, cache_directory = None):
        """Build visualizations for self.indicators and return the skipped ones.

        Computes each configured indicator over the union of all requested
        years, maps self.indicator_type to a visualization type plus output
        options, and hands everything to VisualizationFactory.  Populates
        self.visualizations and returns the list of indicators that could not
        be visualized (spatial viz types on non-spatial datasets).
        """
        self.visualizations = []
        indicators_to_visualize = {}
        interface = IndicatorFrameworkInterface(self.project)
        not_visualized = []

        #get common years
        years = set([])
        for indicator in self.indicators:
            years |= set(indicator['years'])

        source_data_objs = {}
        for indicator in self.indicators:
            indicator_name = indicator['indicator_name']
            source_data_name = indicator['source_data_name']
            dataset_name = indicator['dataset_name']

            # Map-based visualizations only work for spatial datasets;
            # collect the rest to report back to the caller.
            if (self.indicator_type == 'mapnik_map' or self.indicator_type == 'mapnik_animated_map') and dataset_name not in self.spatial_datasets:
                not_visualized.append(indicator)
                continue

            # Cache source-data lookups per source_data_name.
            if source_data_name not in source_data_objs:
                source_data = interface.get_source_data(
                                             source_data_name = source_data_name,
                                             years = list(years),
                                             cache_directory = cache_directory
                )
                source_data_objs[source_data_name] = source_data
            else:
                source_data = source_data_objs[source_data_name]

            # Rebinds the loop variable 'indicator' to the interface's
            # indicator object for the rest of this iteration.
            indicator = interface.get_indicator(
                                         indicator_name = indicator_name,
                                         dataset_name = dataset_name)

            computed_indicator = interface.get_computed_indicator(indicator = indicator,
                                                                  source_data = source_data,
                                                                  dataset_name = dataset_name)
            computed_indicator.gui_indicator_name = indicator_name
            #####################
            #hack to get plausible primary keys...
            # NOTE(review): this overwrites the 'cache_directory' parameter,
            # which later loop iterations pass back into get_source_data --
            # confirm that is intended.
            cache_directory = source_data.cache_directory
            _storage_location = os.path.join(cache_directory,
                                             'indicators',
                                             '_stored_data',
                                             repr(source_data.years[0]))

            storage = StorageFactory().get_storage(
                           type = 'flt_storage',
                           storage_location = _storage_location)
            cols = storage.get_column_names(
                        table_name = dataset_name)

            # Treat any column containing '_id' as a primary key candidate.
            primary_keys = [col for col in cols if col.find('_id') != -1]
            computed_indicator.primary_keys = primary_keys
            ##################

            name = computed_indicator.get_file_name(
                suppress_extension_addition = True)

            indicators_to_visualize[name] = computed_indicator

        viz_args = {}

        # Translate the GUI indicator type into a visualization type plus
        # table output style/format options.
        if self.indicator_type == 'mapnik_map':
            viz_type = self.indicator_type
        elif self.indicator_type == 'mapnik_animated_map':
            viz_type = self.indicator_type
        elif self.indicator_type == 'matplotlib_chart':
            viz_type = self.indicator_type
        elif self.indicator_type == 'tab':
            viz_type = 'table'
            if 'output_style' not in viz_args:
                viz_args['output_style'] = Table.ALL
            viz_args['output_type'] = 'tab'
        elif self.indicator_type == 'table_esri':
            viz_type = 'table'
            if 'output_style' not in viz_args:
                viz_args['output_style'] = Table.ALL
            viz_args['output_type'] = 'esri'
        elif self.indicator_type == 'table_per_year':
            viz_type = 'table'
            if 'output_style' not in viz_args:
                viz_args['output_style'] = Table.PER_YEAR
            viz_args['output_type'] = 'tab'
        elif self.indicator_type == 'table_per_attribute':
            viz_type = 'table'
            if 'output_style' not in viz_args:
                viz_args['output_style'] = Table.PER_ATTRIBUTE
            viz_args['output_type'] = 'tab'

        # Caller-supplied kwargs/args override the defaults above.
        viz_args.update(self.kwargs)
        viz_args.update(args)

#        try:
#            import pydevd;pydevd.settrace()
#        except:
#            pass

        viz_factory = VisualizationFactory()
        self.visualizations = viz_factory.visualize(
                                  indicators_to_visualize = indicators_to_visualize.keys(),
                                  computed_indicators = indicators_to_visualize,
                                  visualization_type = viz_type, **viz_args)

        if self.visualizations is None:
            self.visualizations = []

        return not_visualized