def run(self):
        """
        run the metric calculations and then the minimum spanning forest
        algorithm on the inputs, and write output based on the configuration
        """

        # make output dir if not exists
        if not os.path.exists(self.output_directory):
            os.makedirs(self.output_directory)

        metric_config = json.load(open(
            self.config['metric_model_parameters_file']))
        # read in metrics and setup dataset_store
        demand_proj = csv_projection(self.config['demand_nodes_file'])
        target_path = os.path.join(self.output_directory, "dataset.db")
        self.store = dataset_store.create(target_path,
            self.config['demand_nodes_file'])

        metric_model = metric.getModel(self.config['metric_model'])
        metric_vbobs = self._run_metric_model(metric_model, metric_config)
        demand_nodes = self._get_demand_nodes(input_proj=demand_proj)
        existing, msf = self._build_network(demand_nodes)
        self._store_networks(msf, existing)
        metric_vbobs = self._update_metrics(metric_model, metric_vbobs)
        self._save_output(metric_vbobs, metric_config, metric_model)
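For reference, the keys this run() reads from self.config suggest a configuration dictionary roughly like the sketch below; only the key names come from the code above, while the file names and model name are placeholder assumptions.

# Illustrative only: key names are taken from run() above, values are made up.
example_config = {
    'demand_nodes_file': 'demand_nodes.csv',               # input CSV of demand nodes
    'metric_model_parameters_file': 'metric_params.json',  # JSON of metric model parameters
    'metric_model': 'mvMax5',                               # name resolved via metric.getModel
}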
    def run(self):
        """
        run the metric calculations and then the minimum spanning forest
        algorithm on the inputs, and write output based on the configuration
        """

        # make output dir if not exists
        if not os.path.exists(self.output_directory):
            os.makedirs(self.output_directory)

        metric_config = json.load(
            open(self.config['metric_model_parameters_file']))
        # read in metrics and setup dataset_store
        demand_proj = nio.read_csv_projection(self.config['demand_nodes_file'])
        target_path = os.path.join(self.output_directory, "dataset.db")
        self.store = dataset_store.create(target_path,
                                          self.config['demand_nodes_file'])

        log.info("running metric model {}".format(self.config['metric_model']))
        metric_model = metric.getModel(self.config['metric_model'])
        metric_vbobs = self._run_metric_model(metric_model, metric_config)
        demand_nodes = self._get_demand_nodes(input_proj=demand_proj)

        existing_networks = None

        if 'existing_networks' in self.config:
            existing_networks = networker_runner.load_existing_networks(
                prefix="grid-", **self.config['existing_networks'])

        network_algorithm = self.config['network_algorithm']

        min_node_count = 0
        single_network = True
        if 'network_parameters' in self.config:
            network_params = self.config['network_parameters']
            min_node_count = network_params.get('minimum_node_count', 0)
            single_network = network_params.get('single_network', True)

        header_type = VS.HEADER_TYPE_SECTION_OPTION
        if 'output_parameters' in self.config:
            output_params = self.config['output_parameters']
            header_type = output_params.get('header_type',
                                            VS.HEADER_TYPE_SECTION_OPTION)

        log.info("building network")
        msf = networker_runner.build_network(
            demand_nodes,
            existing=existing_networks,
            min_node_count=min_node_count,
            single_network=single_network,
            network_algorithm=network_algorithm,
            one_based=True)

        log.info("writing output")
        self._store_networks(msf, existing_networks)
        metric_vbobs = self._update_metrics(metric_model, metric_vbobs)
        self._save_output(metric_vbobs,
                          metric_config,
                          metric_model,
                          header_type=header_type)
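This variant also honors several optional configuration sections. A sketch of a fuller configuration follows; the key names mirror the lookups in run() above, while the values, paths, and algorithm name are assumptions.

# Illustrative only: key names mirror the config lookups in run() above;
# values, file paths and the algorithm name are assumed.
example_config = {
    'demand_nodes_file': 'demand_nodes.csv',
    'metric_model_parameters_file': 'metric_params.json',
    'metric_model': 'mvMax5',
    'network_algorithm': 'mod_boruvka',      # assumed name; passed through to build_network
    'network_parameters': {
        'minimum_node_count': 2,             # read as min_node_count
        'single_network': False,             # False keeps a forest of subnetworks
    },
    'output_parameters': {
        'header_type': 'alias',              # assumed spelling; compared against VS.HEADER_TYPE_* constants
    },
    # 'existing_networks': {...},            # optional; kwargs for load_existing_networks
}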
 def run(self):
     # Prepare
     scenarioInput = self.input
     scenarioFolder = self.getFolder()
     expandPath = lambda x: os.path.join(scenarioFolder, x)
     # Register demographics
     print 'Registering demographics'
     nodesPath = expandPath('nodes')
     targetPath = self.getDatasetPath()
     sourcePath = expandPath(scenarioInput['demographic file name'])
     datasetStore = dataset_store.create(targetPath, sourcePath)
     datasetStore.saveNodesSHP(nodesPath)
     datasetStore.saveNodesCSV(nodesPath)
     # Apply metric
     print 'Applying metric'
     metricModel = metric.getModel(scenarioInput['metric model name'])
     metricConfiguration = scenarioInput['metric configuration']
     metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration)
     # Build network
     print 'Building network'
     networkModel = network.getModel(scenarioInput['network model name'])
     networkConfiguration = scenarioInput['network configuration']
     networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration)
     # Update metric
     print 'Updating metric'
     metricValueByOptionBySection = datasetStore.updateMetric(metricModel, metricValueByOptionBySection)
     # Save output
     print 'Saving output'
     metric.saveMetricsCSV(expandPath('metrics-global'), metricModel, metricValueByOptionBySection)
     datasetStore.saveMetricsCSV(expandPath('metrics-local'), metricModel)
     datasetStore.saveSegmentsSHP(expandPath('networks-existing'), is_existing=True)
     datasetStore.saveSegmentsSHP(expandPath('networks-proposed'), is_existing=False)
     # Bundle
     store.zipFolder(scenarioFolder + '.zip', scenarioFolder)
     # Validate
     self.validateParameters()
     # Save output
     self.output = {
         'variables': { 
             'node': dict((str(x.id), dict(input=x.input, output=x.output)) for x in datasetStore.cycleNodes()),
             'metric': metricValueByOptionBySection,
             'network': networkValueByOptionBySection,
         }, 
         'statistics': { 
             'node': datasetStore.getNodeStatistics(), 
             'metric': datasetStore.getMetricStatistics(), 
             'network': datasetStore.getNetworkStatistics(), 
         }, 
         'warnings': store.popWarnings(self.id),
     }
     # Commit
     Session.commit()
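The scenario input dictionary consumed by this run() carries the demographic file name, the model names, and the model configurations. A sketch with the key names taken from the lookups above and placeholder values drawn from the other examples in this listing:

# Illustrative only: keys match the scenarioInput lookups in run() above;
# values are placeholders borrowed from the surrounding examples.
scenarioInput = {
    'demographic file name': 'sample_demand_nodes.csv',
    'metric model name': 'mvMax5',
    'metric configuration': {
        'demand (household)': {'household unit demand per household per year': 50},
    },
    'network model name': 'modKruskal',
    'network configuration': {
        'algorithm': {'minimum node count per subnetwork': 2},
    },
}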
    def test_scenarioRun(self):
        'for now, just make sure it runs'
        sourcePath = os.path.join(inputDataPath, "sample_demand_nodes.csv")
        # make output dir if not exists
        if not os.path.exists(outputDataPath):
            os.makedirs(outputDataPath)

        targetPath = os.path.join(outputDataPath, "dataset.db")
        datasetStore = dataset_store.create(targetPath, sourcePath)
        
        """
        // Sample Model Parameter JSON
        metricValueByOptionBySection = {
            'demand (household)': 
                {'household unit demand per household per year': 50}
        }
        """
        metricConfigPath = os.path.join(baseDirPath, "sample_metric_params.json")
        metricConfiguration = json.load(open(metricConfigPath, 'r'))

        """
        // Sample Model Parameter JSON
        networkValueByOptionBySection = {
            'algorithm': 
                {'minimum node count per subnetwork': 2}
        }
        """
        networkConfigPath = os.path.join(baseDirPath, "network_params.json")
        networkConfiguration = json.load(open(networkConfigPath, 'r'))

        # Run metric model
        metricModel = metric.getModel("mvMax5")
        metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration)

        # Now that metrics (mvMax in particular) have been calculated
        # we can build the network
        networkModel = network.getModel("modKruskal")
        networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration)

        # Now that the network's been built (and the electrification option 
        # is chosen) run the aggregate calculations
        metricValueByOptionBySection = datasetStore.updateMetric(metricModel, metricValueByOptionBySection)

        metric.saveMetricsConfigurationCSV(os.path.join(outputDataPath, 'metrics-job-input'), metricConfiguration)
        metric.saveMetricsCSV(os.path.join(outputDataPath, 'metrics-global'), metricModel, metricValueByOptionBySection)
        datasetStore.saveMetricsCSV(os.path.join(outputDataPath, 'metrics-local'), metricModel)
        datasetStore.saveSegmentsSHP(os.path.join(outputDataPath, 'networks-proposed'), is_existing=False)
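The two parameter files the test loads correspond to the samples quoted in its docstrings. A minimal sketch that would produce equivalent fixture files; the file names come from the test and the contents from the docstring samples, everything else is an assumption:

import json

# Contents mirror the samples quoted in the test docstrings above.
metric_params = {
    'demand (household)': {'household unit demand per household per year': 50},
}
network_params = {
    'algorithm': {'minimum node count per subnetwork': 2},
}

with open('sample_metric_params.json', 'w') as f:
    json.dump(metric_params, f, indent=4)

with open('network_params.json', 'w') as f:
    json.dump(network_params, f, indent=4)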
 def run(self):
     # Prepare
     scenarioInput = self.input
     scenarioFolder = self.getFolder()
     expandPath = lambda x: os.path.join(scenarioFolder, x)
     # Setup status reporting
     from time import localtime, strftime
     time_format = "%Y-%m-%d %H:%M:%S"
     
     # Register demographics
     Job.log("Registering demographics")
     print "%s Registering demographics" % strftime(time_format, localtime())
     nodesPath = expandPath('nodes')
     targetPath = self.getDatasetPath()
     sourcePath = expandPath(scenarioInput['demographic file name'])
     datasetStore = dataset_store.create(targetPath, sourcePath)
     datasetStore.saveNodesSHP(nodesPath)
     datasetStore.saveNodesCSV(nodesPath)
     # Apply metric
     Job.log("Applying metric")
     print "%s Applying metric" % strftime(time_format, localtime())
     metricModel = metric.getModel(scenarioInput['metric model name'])
     metricConfiguration = scenarioInput['metric configuration']
     metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration)
     # Build network
     Job.log("Building network")
     print "%s Building network" % strftime(time_format, localtime())
     networkModel = network.getModel(scenarioInput['network model name'])
     networkConfiguration = scenarioInput['network configuration']
     networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration, jobLogger=Job)
     # Update metric
     Job.log("Updating metric")
     print "%s Updating metric" % strftime(time_format, localtime())
     metricValueByOptionBySection = datasetStore.updateMetric(metricModel, metricValueByOptionBySection)
     # Save output
     Job.log("Saving output")
     print "%s Saving output" % strftime(time_format, localtime())
     metric.saveMetricsConfigurationCSV(expandPath('metrics-job-input'), metricConfiguration)
     metric.saveMetricsCSV(expandPath('metrics-global'), metricModel, metricValueByOptionBySection)
     datasetStore.saveMetricsCSV(expandPath('metrics-local'), metricModel)
     datasetStore.saveSegmentsSHP(expandPath('networks-existing'), is_existing=True)
     datasetStore.saveSegmentsSHP(expandPath('networks-proposed'), is_existing=False)
     # Bundle
     store.zipFolder(scenarioFolder + '.zip', scenarioFolder)
     # Validate
     self.validateParameters()
     # Save output
     self.output = {
         'variables': { 
             'node': dict((str(x.id), dict(input=x.input, output=x.output)) for x in datasetStore.cycleNodes()),
             'metric': metricValueByOptionBySection,
             'network': networkValueByOptionBySection,
         }, 
         'statistics': { 
             'node': datasetStore.getNodeStatistics(), 
             'metric': datasetStore.getMetricStatistics(), 
             'network': datasetStore.getNetworkStatistics(), 
         }, 
         'warnings': store.popWarnings(self.id),
     }
     # Commit
     Session.commit()
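The Job.log / timestamped print pairs above repeat the same lines at every stage. A small helper along these lines could consolidate them; this is only a sketch and assumes the Job logger used above is in scope:

from time import localtime, strftime

def report(stage, time_format="%Y-%m-%d %H:%M:%S"):
    # Record the stage with the job logger and echo a timestamped line to
    # the console, matching the repeated pattern in run() above.
    Job.log(stage)
    print("%s %s" % (strftime(time_format, localtime()), stage))

# report("Registering demographics")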
    parser.add_argument("-t", "--header-type", 
                        choices=[VS.HEADER_TYPE_SECTION_OPTION, 
                                 VS.HEADER_TYPE_ALIAS], 
                        default=VS.HEADER_TYPE_ALIAS,
                        help="the output file header field name type")                       
 
                       
    args = parser.parse_args()

    # make output dir if not exists
    outputDataPath = args.output_path
    if not os.path.exists(outputDataPath):
        os.makedirs(outputDataPath)

    targetPath = os.path.join(outputDataPath, "dataset.db")
    datasetStore = dataset_store.create(targetPath, args.input_nodes_file)

    # setup models
    metricModel = metric.getModel(args.metric_model_name)
    metricConfiguration = json.load(args.metric_model_params)
    networkModel = network.getModel(args.network_model_name)
    networkConfiguration = json.load(args.network_model_params)

    # Run metric model
    metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration)

    # Now that metrics (mvMax in particular) have been calculated
    # we can build the network
    networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration)

    # Now that the network's been built (and the electrification option
    # is chosen) run the aggregate calculations
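For reference, the attribute accesses in this snippet (args.output_path, args.input_nodes_file, args.metric_model_name, args.metric_model_params, args.network_model_name, args.network_model_params) imply a parser set up roughly as sketched below. Only the -t/--header-type definition appears above, so the positional arguments, types, defaults, and help text here are assumptions:

import argparse

# A sketch only: argument names are inferred from the attribute accesses in
# the snippet above; flags, types, defaults and help text are assumed.
parser = argparse.ArgumentParser(description="run metric and network models")
parser.add_argument("input_nodes_file",
                    help="csv file of demand nodes")
parser.add_argument("metric_model_name",
                    help="name passed to metric.getModel")
parser.add_argument("metric_model_params", type=argparse.FileType("r"),
                    help="json file of metric model parameters")
parser.add_argument("network_model_name",
                    help="name passed to network.getModel")
parser.add_argument("network_model_params", type=argparse.FileType("r"),
                    help="json file of network model parameters")
parser.add_argument("-o", "--output-path", default=".",
                    help="directory for generated outputs")
# the -t/--header-type option shown in the snippet above would be added here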