def load_network(self, network_id, scenario_id):
    """Load a network and one of its scenarios from the server.

    Falls back to ``self.network_id`` / ``self.scenario_id`` when the
    supplied identifiers are missing or non-numeric, fetches the network
    (including data) for the single requested scenario, caches that
    scenario's resource scenarios on ``self.res_scenario`` and builds an
    attribute id -> name map in ``self.attrs``.

    Raises:
        HydraPluginError: when no usable network or scenario id exists.
    """
    try:
        net_id = int(network_id)
    except (TypeError, ValueError):
        net_id = self.network_id
        if net_id is None:
            raise HydraPluginError("No network specified.")

    try:
        scen_id = int(scenario_id)
    except (TypeError, ValueError):
        scen_id = self.scenario_id
        if scen_id is None:
            raise HydraPluginError("No scenario specified.")

    self.network = self.connection.call(
        'get_network', {
            'network_id': int(net_id),
            'include_data': 'Y',
            'scenario_ids': [int(scen_id)],
            'template_id': None
        })

    # Only one scenario was requested, so it is the first (and only) entry.
    self.res_scenario = self.network.scenarios[0].resourcescenarios

    # Cache attribute id -> name for later lookups.
    for attr in self.connection.call('get_all_attributes', {}):
        self.attrs.update({attr.id: attr.name})
def get_network_data(self, network_id, scenario_id):
    """
    Retrieve the network, identify the parameters to set, set them and
    run the model. Then identify the results and set them back on the
    network.

    Args:
        network_id:  id of the network to fetch (must not be None).
        scenario_id: id of the scenario to fetch (must not be None).

    Raises:
        HydraPluginError: when the scenario id is missing or the network
        cannot be retrieved from the server.
    """
    write_output("Retrieving Network")
    write_progress(2, self.num_steps)
    if network_id is not None:
        if scenario_id is None:
            raise HydraPluginError("A scenario ID must be specified.")
        #The network ID can be specified to get the network...
        try:
            network_id = int(network_id)
            network = self.connection.call(
                'get_network', {
                    'network_id': network_id,
                    'scenario_ids': [int(scenario_id)]
                })
            write_output("Network retrieved")
        except Exception, e:
            # Any server/conversion failure is reported as "not found".
            log.exception(e)
            raise HydraPluginError("Network %s not found." % network_id)
    # NOTE(review): this function appears truncated here — there is no
    # else-branch for network_id is None and the fetched `network` is
    # never used; the continuation may live elsewhere in this file.
def export_network():
    """Export the network named by the command-line arguments to GAMS.

    Builds a GAMSexport for the given network/scenario, writes the time
    index (either from an explicit time axis or from a start/end/step
    triple), exports the data and writes the output file.  On a
    HydraPluginError an XML error response is printed and the process
    exits with status 0, as the plugin framework expects.
    """
    try:
        exporter = GAMSexport(int(args.network),
                              int(args.scenario),
                              None,  # template id, attached below when given
                              args.output,
                              link_export_flag,
                              url=args.server_url)

        if args.template_id is not None:
            exporter.template_id = int(args.template_id)

        exporter.export_network()

        # A time index is mandatory: either a full (start, end, step)
        # triple or an explicit axis.
        have_range = (args.start_date is not None
                      and args.end_date is not None
                      and args.time_step is not None)
        if have_range:
            exporter.write_time_index(start_time=args.start_date,
                                      end_time=args.end_date,
                                      time_step=args.time_step)
        elif args.time_axis is not None:
            exporter.write_time_index(time_axis=args.time_axis)
        else:
            raise HydraPluginError('Time axis not specified.')

        exporter.export_data()
        exporter.write_file()
    except HydraPluginError as e:
        errors = [e.message]
        err = PluginLib.create_xml_response('GAMSexport',
                                            args.network,
                                            [args.scenario],
                                            errors=errors)
        print(err)
        sys.exit(0)
def parse_time_step(self, time_step, target='s'):
    """
    Read in the time step and convert it to seconds.

    Args:
        time_step: a string such as "1 day" — a number followed by a
                   time unit understood by ``get_time_period``.
        target:    unit to convert the value into (default 's').

    Returns:
        Tuple of (converted_time_step, value, period): the value in the
        target unit (float), the raw numeric value, and the period name.

    Raises:
        HydraPluginError: when no numeric value can be extracted from
        ``time_step``.
    """
    log.info("Parsing time step %s", time_step)
    # extract the numerical value from the string using a regex
    numbers = re.findall(r'\d+', time_step)
    if len(numbers) == 0:
        # Guard: the original indexed [0] directly and died with an
        # unhelpful IndexError when no digits were present.
        raise HydraPluginError(
            "Unable to extract number of time steps (%s) from time step %s"
            % (numbers, time_step))
    value = numbers[0]
    valuelen = len(value)
    try:
        value = float(value)
    except ValueError:
        # BUG FIX: the original built this HydraPluginError but never
        # raised it (and used a bare except), silently continuing with
        # a string value.
        raise HydraPluginError(
            "Unable to extract number of time steps (%s) from time step %s"
            % (value, time_step))
    # Whatever follows the digits is the unit, e.g. "day" in "1 day".
    units = time_step[valuelen:].strip()
    period = get_time_period(units)
    log.info("Time period is %s", period)
    converted_time_step = self.units.convert(value, period, target)
    log.info("Time period is %s %s", converted_time_step, period)
    return float(converted_time_step), value, period
def run_gams_model():
    """
    Run the GAMS model named by ``args.gms_file`` and locate the .gdx
    result file, either from ``args.gdx_file`` or by scanning the GAMS
    working directory for a .gdx produced after the run started.

    On any failure an XML error response is printed and the process
    exits with status 0 (as the plugin framework expects).
    """
    try:
        log.info("Running GAMS model .....")
        # Remember when the run started so freshly written .gdx files
        # can be distinguished from pre-existing ones.
        cur_time=datetime.now()
        working_directory=os.path.dirname(args.gms_file)
        model = GamsModel(args.gams_path, working_directory)
        model.add_job(args.gms_file)
        model.run()
        log.info("Running GAMS model finsihed")
        # if result file is not provided, it looks for it automatically at GAMS WD
        if(args.gdx_file==None):
            log.info("Extract result file name.....")
            files_list=get_files_list(working_directory, '.gdx')
            # NOTE(review): files_list is indexed by file name below, so it
            # presumably maps file name -> timestamp string — confirm the
            # get_files_list contract.
            for file_ in files_list:
                from dateutil import parser
                dt = parser.parse(files_list[file_])
                delta= (dt-cur_time).total_seconds()
                # Any .gdx newer than the run start is taken as the result;
                # NOTE(review): if several match, the last one iterated wins.
                if delta>0:
                    # NOTE(review): Windows path separator is hard-coded.
                    args.gdx_file=working_directory+"\\"+file_
            if(args.gdx_file==None):
                raise HydraPluginError('Result file is not provided/found.')
    except Exception as e:
        errors = [e.message]
        print "Error is: ", errors
        err = PluginLib.create_xml_response('GAMS_run_model', args.network, [args.scenario], errors = errors)
        print err
        sys.exit(0)
def fetch_project(self, project_id):
    """
    Return the project that will hold the incoming network.

    If ``project_id`` is given, the existing project is fetched from the
    server (raising HydraPluginError when the server does not know it).
    Otherwise a brand-new project with a unique, timestamped name is
    created and returned, so the network can access its ID.
    """
    if project_id is not None:
        try:
            project = self.connection.call('get_project',
                                           {'project_id': int(project_id)})
            log.info('Loading existing project (ID=%s)' % project_id)
            return project
        except RequestError:
            raise HydraPluginError("Project with ID %s not found" % project_id)

    #Using 'datetime.now()' in the name guarantees a unique project name.
    new_project = {
        'name': "Hobbes Project created at %s" % (datetime.now()),
        'description': "Default project created by the %s plug-in." % (self.__class__.__name__),
    }
    return self.connection.call('add_project', {'project': new_project})
def get_template(self, template_id):
    """Fetch a template and index its types by name.

    Populates ``self.type_name_map`` with type name -> type object for
    every type in the template.

    Raises:
        HydraPluginError: when no template id is supplied.
    """
    if template_id is None:
        raise HydraPluginError("No template specified!")

    template = self.connection.call('get_template',
                                    {'template_id': int(template_id)})

    for t_type in template.types:
        self.type_name_map[t_type.name] = t_type
def get_time_axis(self, start_time, end_time, time_step, time_axis=None):
    """
    Create a list of datetimes based on a start time, end time and time
    step.  If such a list (``time_axis``) is already passed in, it is
    cleaned up (commas and blanks removed) and returned directly.

    Often either start_time/end_time/time_step OR time_axis is passed
    into an app; this function returns a time axis in both situations.

    Raises:
        HydraPluginError: when neither a time_axis nor a complete
        (start_time, end_time, time_step) triple is supplied.
    """
    if time_axis is not None:
        actual_dates_axis = []
        for t in time_axis:
            #If the user has entered the time_axis with commas, remove them.
            t = t.replace(',', '').strip()
            if t == '':
                continue
            actual_dates_axis.append(get_datetime(t))
        return actual_dates_axis

    if start_time is None:
        raise HydraPluginError("A start time must be specified")
    if end_time is None:
        # BUG FIX: message previously read "And end time must be specified".
        raise HydraPluginError("An end time must be specified")
    if time_step is None:
        raise HydraPluginError("A time-step must be specified")

    start_date = get_datetime(start_time)
    end_date = get_datetime(end_time)
    delta_t, value, units = self.parse_time_step(time_step)

    time_axis = [start_date]
    value = int(value)
    while start_date < end_date:
        #Months and years have variable lengths, so they are a special
        #case and must be stepped with relativedelta.
        if units.lower() == "mon":
            start_date = start_date + relativedelta(months=value)
        elif units.lower() == "yr":
            start_date = start_date + relativedelta(years=value)
        else:
            start_date += timedelta(seconds=delta_t)
        time_axis.append(start_date)
    return time_axis
def validate_plugin_xml(plugin_xml_file_path):
    """Read a plugin's XML descriptor and parse it against the plugin XSD.

    Raises:
        HydraPluginError: when the file cannot be read or the XML does
        not parse.
    """
    log.info('Validating plugin xml file (%s).' % plugin_xml_file_path)

    try:
        with open(plugin_xml_file_path) as f:
            xml_text = f.read()
    except:
        raise HydraPluginError("Couldn't find plugin.xml.")

    try:
        plugin_xsd_path = os.path.expanduser(config.get('plugin', 'plugin_xsd_path'))
        log.info("Plugin Input xsd: %s", plugin_xsd_path)
        schema_doc = etree.parse(plugin_xsd_path)
        xmlschema = etree.XMLSchema(schema_doc)
        xml_tree = etree.fromstring(xml_text)
    except XMLSyntaxError as e:
        raise HydraPluginError("There is an error in your XML syntax: %s" % e)
def check_args(args):
    '''
    Validate the command-line arguments supplied by the user: network
    and scenario must be integers, and the output file's directory must
    exist.
    '''
    try:
        int(args.network)
    except (TypeError, ValueError):
        raise HydraPluginError('No network is specified')

    try:
        int(args.scenario)
    except (TypeError, ValueError):
        raise HydraPluginError('No senario is specified')

    if args.output is None:
        raise HydraPluginError('No output file is specified')

    out_dir = os.path.dirname(args.output)
    if not os.path.exists(out_dir):
        raise HydraPluginError('output file directory: ' + out_dir + ', is not exist')
def convert_to_int(value, type):
    '''
    Convert ``value`` to an integer.

    Used for both the network id and the scenario id; ``type`` is a
    label (e.g. "network" or "scenario") used in the error message.

    Raises:
        HydraPluginError: when ``value`` cannot be converted.
    '''
    try:
        return int(value)
    except (TypeError, ValueError):
        # BUG FIX: the original raised a misleading, copy-pasted
        # 'Time axis not specified.' error and would itself crash with a
        # TypeError ("+ value") when value was None; report the actual
        # problem with %s formatting instead.
        raise HydraPluginError(
            "%s needs to be an integer, input is: %s" % (type, value))
def export_data(args):
    """
    Build an Exporter for ``args``, give it a time index (either an
    explicit axis or a start/end/step triple), export the network and
    save the output file.

    Returns:
        The exporter's network object (``exporter.net``).

    Raises:
        HydraPluginError: when no time axis information is supplied.
    """
    # NOTE(review): template_id is computed but never handed to Exporter —
    # presumably Exporter reads args.template_id itself; the int() call is
    # kept because it validates the supplied value.
    template_id = None
    if args.template_id is not None:
        template_id = int(args.template_id)

    exporter = Exporter(args, link_export_flag, steps)

    have_range = (args.start_date is not None
                  and args.end_date is not None
                  and args.time_step is not None)
    if have_range:
        exporter.get_time_index(start_time=args.start_date,
                                end_time=args.end_date,
                                time_step=args.time_step)
    elif args.time_axis is not None:
        exporter.get_time_index(time_axis=args.time_axis)
    else:
        raise HydraPluginError('Time axis not specified.')

    exporter.export_network()
    exporter.save_file()
    return exporter.net
def fetch_project(self, project_id):
    """
    Look up an existing project by id.

    If ``project_id`` is given, the project is retrieved from the server
    so the caller knows it exists; a failed lookup is logged and
    re-raised as a HydraPluginError.  When no id is supplied, None is
    returned.
    """
    if project_id is None:
        return None

    try:
        project = self.connection.call('get_project',
                                       {'project_id': project_id})
        log.info('Loading existing project (ID=%s)' % project_id)
        return project
    except RequestError as e:
        log.exception(e)
        raise HydraPluginError("An error occurred retrieving project ID %s" % project_id)
def fetch_remote_network(self):
    """
    Download the hobbes network (JSON) from the hobbes server and store
    the decoded result on ``self.json_net``.

    Raises:
        HydraPluginError: when the server answers anything but HTTP 200.
    """
    write_output("Fetching Network")
    write_progress(2, self.num_steps)

    #JSON Network.  An XLS variant is available at
    #http://cwn.casil.ucdavis.edu/excel/create?prmname=SR_CLE
    net_response = requests.get("http://cwn.casil.ucdavis.edu/network/get")

    if net_response.status_code != 200:
        raise HydraPluginError(
            "A connection error has occurred with status code: %s"
            % net_response.status_code)

    self.json_net = json.loads(net_response.content)
def get_time_index(self, start_time=None, end_time=None, time_step=None,
                   time_axis=None):
    '''
    Populate ``self.time_index`` (1-based position -> timestamp) using
    either the explicit ``time_axis`` or the start/end/step triple,
    via ``self.get_time_axis``.

    Raises:
        HydraPluginError: when the time information is missing or
        malformed.
    '''
    try:
        log.info("Writing time index")
        time_axis = self.get_time_axis(start_time,
                                       end_time,
                                       time_step,
                                       time_axis=time_axis)
        for t, timestamp in enumerate(time_axis, 1):
            self.time_index[t] = timestamp
    except HydraPluginError:
        # BUG FIX: previously every exception was replaced by the generic
        # message below, masking precise errors raised by get_time_axis
        # (e.g. "A start time must be specified").  Let those through.
        raise
    except Exception as e:
        log.exception(e)
        raise HydraPluginError(
            "Please check time-axis or start time, end times and time step."
        )
def check_args(args):
    """
    Validate command-line arguments: network and scenario ids must be
    integers, the model file must exist, and the output location must
    be usable.

    Raises:
        HydraPluginError: describing the first problem found.
    """
    try:
        int(args.network)
    except (TypeError, ValueError):
        raise HydraPluginError('No network is specified')
    try:
        int(args.scenario)
    except (TypeError, ValueError):
        raise HydraPluginError('No senario is specified')

    if args.model_file is None:
        raise HydraPluginError('model file is not specifed')
    elif not os.path.isfile(args.model_file):
        raise HydraPluginError('model file: ' + args.model_file + ', is not existed')
    elif args.output is None:
        raise HydraPluginError('No output file specified')
    # BUG FIX: the original tested os.path.exists(os.path.abspath(args.output)),
    # i.e. whether the output *file itself* already existed, while the error
    # message talks about its directory — check the directory instead.
    elif not os.path.exists(os.path.dirname(args.output)):
        raise HydraPluginError('output file directory ' +
                               os.path.dirname(args.output) + ' does not exist')
    elif not os.path.isfile(args.output):
        # NOTE(review): requiring the output file to pre-exist is unusual
        # for an output path but is kept from the original logic.
        raise HydraPluginError('output file ' + args.output + ' does not exist')
def xsd_validate(template_file):
    """
    Validate a template against the xsd.

    Returns the parsed XML tree when validation succeeds.

    Raises:
        HydraPluginError: when the template does not satisfy the schema.
    """
    with open(template_file) as f:
        xml_template = f.read()

    template_xsd_path = os.path.expanduser(config.get('templates',
                                                      'template_xsd_path'))
    log.info("Template xsd: %s", template_xsd_path)

    schema = etree.XMLSchema(etree.parse(template_xsd_path))
    xml_tree = etree.fromstring(xml_template)

    try:
        schema.assertValid(xml_tree)
    except etree.DocumentInvalid as e:
        raise HydraPluginError('Template validation failed: ' + e.message)

    log.info("Template XSD validation successful.")
    return xml_tree
def check_args(args):
    """
    Validate command-line arguments: network and scenario ids must be
    integers and the model file must exist.  When no output file is
    given, a default 'input.dat' next to the model file is used.

    Raises:
        HydraPluginError: describing the first problem found.
    """
    try:
        int(args.network)
    except (TypeError, ValueError):
        raise HydraPluginError('No network is specified')
    try:
        int(args.scenario)
    except (TypeError, ValueError):
        raise HydraPluginError('No senario is specified')
    log.info("Checking model file: %s" % args.model_file)
    if args.model_file is None:
        raise HydraPluginError('Model file is not specifed')
    elif os.path.isfile(args.model_file) == False:
        raise HydraPluginError('Model file ' + args.model_file + ' not found.')
    elif args.output == None:
        #if output file is not provided, plug in use default one
        modelpath = os.path.dirname(args.model_file)
        args.output = os.path.join(modelpath, 'input.dat')
    # NOTE(review): args.output can no longer be None here — the branch
    # above just assigned a default — so this check looks redundant.
    if args.output is None:
        raise HydraPluginError('No output file specified')
    # NOTE(review): a bare file name yields dirname '' which fails
    # os.path.exists, so relative outputs in the cwd are rejected —
    # confirm whether that is intended.
    elif os.path.exists(os.path.dirname(args.output)) == False:
        raise HydraPluginError('Output file directory: ' + os.path.dirname(args.output) + ', is not exist')
def validate_plugin_xml(plugin_xml_file_path):
    """
    Validate a plugin.xml file against the plugin XSD, raising a
    HydraPluginError with a descriptive message for every failure mode
    (missing file, syntax error, parse error, schema violation).
    """
    log.info('Validating plugin xml file (%s).' % plugin_xml_file_path)

    try:
        with open(plugin_xml_file_path) as f:
            xml_text = f.read()
    except:
        raise HydraPluginError("Couldn't find plugin.xml.")

    try:
        plugin_xsd_path = os.path.expanduser(config.get('plugin', 'plugin_xsd_path'))
        log.info("Plugin Input xsd: %s", plugin_xsd_path)
        schema_doc = etree.parse(plugin_xsd_path)
        schema = etree.XMLSchema(schema_doc)
        tree = etree.fromstring(xml_text)
    except XMLSyntaxError as e:
        raise HydraPluginError("There is an error in your XML syntax: %s" % e)
    except ParseError as e:
        raise HydraPluginError("There is an error in your XML: %s" % e)
    except Exception as e:
        log.exception(e)
        raise HydraPluginError("An unknown error occurred with the plugin xsd: %s" % e.message)

    try:
        schema.assertValid(tree)
    except etree.DocumentInvalid as e:
        raise HydraPluginError('Plugin validation failed: ' + e.message)

    log.info("Plugin XML OK")
#The network ID can be specified to get the network... try: network_id = int(network_id) network = self.connection.call( 'get_network', { 'network_id': network_id, 'scenario_ids': [int(scenario_id)] }) write_output("Network retrieved") except Exception, e: log.exception(e) raise HydraPluginError("Network %s not found." % network_id) else: raise HydraPluginError("A network ID must be specified!") template_id = None if network.types is not None: template_id = network.types[0].template_id self.get_attributes(template_id) #if network.attributes is not None: # raise HydraPluginError("There's no network attributes. Unable to run Model.") for net_ra in network.attributes: net_attr = self.attr_id_map[net_ra.attr_id] else: pass #raise HydraPluginError("There's no network attributes. Unable to run Model.")
def run_model(filename, modelfile):
    '''
    Import ``modelfile`` as a python module, call its ``run_model``
    function on ``filename``, check every solver result for failure and
    return the analysed results.

    Raises:
        HydraPluginError: when a solver status or termination condition
        indicates failure.
    '''
    #Convert truncated file names, containing a "~1" into the full path
    if os.name == 'nt':
        import win32file
        modelfile = win32file.GetLongPathName(modelfile)

    model_dir = os.path.dirname(modelfile)
    sys.path.append(model_dir)
    module_name = os.path.basename(modelfile).split('.')[0]
    log.info("Importing the model from %s ", modelfile)
    mm = importlib.import_module(module_name)
    log.info("Importing the model %s", module_name)
    # BUG FIX: the original bound this to the name `run_model`, shadowing
    # this very function.
    model_runner = getattr(mm, 'run_model')
    log.info("Model is imported.")

    res, instances = model_runner(filename)
    for rs in res:
        _check_solver_status(rs)
        _check_termination_condition(rs)

    log.info("Model is running.")
    units = get_units(modelfile)
    return analyse_results(res, instances, units)


def _check_solver_status(rs):
    """Raise HydraPluginError when the solver status indicates failure.

    BUG FIX: the original chain tested ``status != SolverStatus.warning``
    for the warning branch, which matched nearly every status and made
    the ``aborted`` and ``error`` branches unreachable.
    """
    status = rs.solver.status
    if status == SolverStatus.unknown:
        raise HydraPluginError('Unknown error,(an uninitialized value)')
    elif status == SolverStatus.aborted:
        raise HydraPluginError('Terminated due to external conditions (e.g. interrupts)')
    elif status == SolverStatus.error:
        raise HydraPluginError('Terminated internally with error')
    elif status == SolverStatus.warning:
        log.info("Solver status warning")


def _check_termination_condition(rs):
    """Map a failing termination condition to a descriptive HydraPluginError."""
    tc = rs.solver.termination_condition
    messages = {
        TerminationCondition.unknown:
            'solver termination with unknown error, this may indicate that the problem is infeasible',
        TerminationCondition.maxTimeLimit:
            'Exceeded maximum time limit allowed ',
        TerminationCondition.maxIterations:
            'Exceeded maximum number of iterations allowed ',
        TerminationCondition.minFunctionValue:
            'Found solution smaller than specified function value',
        TerminationCondition.minStepLength:
            'Step length is smaller than specified limit',
        TerminationCondition.maxEvaluations:
            'Exceeded maximum number of problem evaluations (e.g., branch and bound nodes',
        TerminationCondition.other:
            ' uncategorized normal termination',
        TerminationCondition.unbounded:
            'Demonstrated that problem is unbounded',
        TerminationCondition.infeasible:
            'Demonstrated that problem is infeasible',
        TerminationCondition.invalidProblem:
            'The problem setup or characteristics are not valid for the solver',
        TerminationCondition.solverFailure:
            'Solver failed to terminate correctly',
        TerminationCondition.internalSolverError:
            'Internal solver error',
        TerminationCondition.error:
            'Other error',
        TerminationCondition.userInterrupt:
            'Interrupt signal generated by user',
        TerminationCondition.resourceInterrupt:
            'Interrupt signal in resources used by the solver',
        #TerminationCondition.licensingProblem:
        #    'Problem accessing solver license',
    }
    if tc in messages:
        raise HydraPluginError(messages[tc])
def export_timeseries_using_attributes(self, resources, res_type=None):
    """
    Export time series.

    Appends one ``param <name>: ...;`` table per distinct timeseries
    attribute of ``resources`` to ``self.output_file_contents``, with
    one row per resource and one column per timestamp in
    ``self.time_index``.

    Args:
        resources: node or link objects carrying ``attributes``.
        res_type:  'LINK' when exporting links (affects row naming).

    Raises:
        HydraPluginError: when no time axis has been set up, or a
        timeseries value cannot be resolved for a resource.
    """
    islink = res_type == 'LINK'
    attributes = []
    attr_names = []

    # Collect one representative attribute per distinct (translated) name.
    for resource in resources:
        for attr in resource.attributes:
            if attr.dataset_type == 'timeseries' and attr.is_var is False:
                # BUG FIX: was ``len(self.time_index) is 0`` — identity
                # comparison against an int literal; use == instead.
                if len(self.time_index) == 0:
                    raise HydraPluginError(
                        "Missing time axis or start date, end date and time step or bad format"
                    )
                attr.name = translate_attr_name(attr.name)
                if attr.name not in attr_names:
                    # (the original also filled an `attrb_tables` dict
                    # here, but it was never read — removed)
                    attributes.append(attr)
                    attr_names.append(attr.name)

    if len(attributes) > 0:
        dataset_ids = []
        all_res_data = {}
        #Identify the datasets that we need data for
        for attribute in attributes:
            for resource in resources:
                attr = resource.get_attribute(attr_name=attribute.name)
                if attr is not None and attr.dataset_id is not None:
                    dataset_ids.append(attr.dataset_id)
                    all_res_data[attr.dataset_id] = json.loads(attr.value)

        #We need to get the value at each time in the specified time axis,
        #so we need to identify the relevant timestamps.
        soap_times = []
        for t, timestamp in enumerate(self.time_index.values()):
            soap_times.append(date_to_string(timestamp))

        #Get all the necessary data for all the datasets we have.
        #all_data = self.connection.call('get_multiple_vals_at_time',
        #                        {'dataset_ids':dataset_ids,
        #                        'timestamps' : soap_times})

        for attribute in attributes:
            self.output_file_contents.append("\nparam " + attribute.name + ":\n")
            self.output_file_contents.append(self.write_time())

            for resource in resources:
                attr = resource.get_attribute(attr_name=attribute.name)
                if attr is None or attr.dataset_id is None or attr.dataset_type != 'timeseries':
                    continue

                try:
                    all_data = self.get_time_value(attr.value,
                                                   self.time_index.values())
                except Exception as e:
                    log.exception(e)
                    all_data = None

                if all_data is None:
                    # BUG FIX: the two string literals were concatenated
                    # without a space ("...on" "resource..."), producing
                    # "onresource" in the message.
                    raise HydraPluginError(
                        "Error finding value attribute %s on "
                        "resource %s" % (attr.name, resource.name))

                name = resource.name
                if islink is True and self.links_as_name is False:
                    name = get_link_name(resource)

                #self.output_file_contents.append("\n "+name)
                nname = "\n " + name
                self.output_file_contents.append(self.ff.format(nname))

                for timestamp in self.time_index.values():
                    tmp = all_data[timestamp]
                    if isinstance(tmp, list):
                        # NOTE(review): assumes the list elements are
                        # strings; "-".join would fail on numerics —
                        # confirm upstream.
                        data = "-".join(tmp)
                        ff_ = '{0:<' + self.array_len + '}'
                        data_str = ff_.format(str(data))
                    else:
                        data = str(tmp)
                        data_str = self.ff.format(str(float(data)))
                    self.output_file_contents.append(data_str)

            # Terminate this attribute's param table.
            # NOTE(review): placement inferred from the data-file syntax
            # (each param table needs its own ';') — the collapsed
            # original made the indentation ambiguous; confirm.
            self.output_file_contents.append(';\n')
def import_data(self, include_timeseries=True):
    """
    Build a new "Hobbes Import" scenario from ``self.json_net`` and save
    it to the server.

    For each imported node: the 'repo' property becomes a special
    dataset, every float property becomes a scalar dataset, and (when
    ``include_timeseries`` is True) extra timeseries are fetched from
    the hobbes server.  Returns the newly added scenario.

    Raises:
        HydraPluginError: when the extras request does not return HTTP 200.
    """
    scenario = {
        "name": "Hobbes Import",
        "description": "Import from hobbes",
    }
    #List of parameters to ignore
    non_attributes = set([
        'origins', 'prmname', 'regions', 'terminals', 'description',
        'extras', 'type', 'repo', 'origin'
    ])
    #Make a map from node name to a list of attributes. Do this by first
    #making a node id map
    node_name_id_map = {}
    for n in self.network.nodes:
        node_name_id_map[n.name] = n.id
    node_attributes = self.connection.call('get_all_node_attributes',
                                           {'network_id': self.network.id})
    # node id -> list of its resource attributes
    node_id_attr_map = {}
    for a in node_attributes:
        n_attrs = node_id_attr_map.get(a.ref_id, [])
        n_attrs.append(a)
        node_id_attr_map[a.ref_id] = n_attrs
    resource_scenarios = []
    #Only the first 10 nodes are imported here (a previous comment said
    #"first 2 nodes" but the slice is [:10]).
    for node in self.json_net[:10]:
        props = node['properties']
        name = props['prmname']
        node_id = node_name_id_map[name]
        #repo is a special case
        repo = self.make_repo_dataset(props['repo'])
        repo_attr_id = self.attr_name_map['repo'].id
        ra_id = None
        for a in node_id_attr_map[node_id]:
            if a.attr_id == repo_attr_id:
                ra_id = a.id
                break
        repo_rs = dict(
            resource_attr_id=ra_id,
            attr_id=repo_attr_id,
            is_var='N',
            value=repo,
        )
        resource_scenarios.append(repo_rs)
        #Every float-valued property becomes a scalar dataset.
        for k, v in props.items():
            if k not in non_attributes:
                if isinstance(v, float):
                    attr_id = self.attr_name_map[k].id
                    dataset = dict(
                        name=k,
                        value=str(v),
                        type='scalar',
                        dimension='dimensionless',
                        unit=None,
                    )
                    ra_id = None
                    for a in node_id_attr_map[node_id]:
                        if a.attr_id == attr_id:
                            ra_id = a.id
                            break
                    resource_scenario = dict(
                        resource_attr_id=ra_id,
                        attr_id=attr_id,
                        is_var='N',
                        value=dataset,
                    )
                    resource_scenarios.append(resource_scenario)
        #timeseries, requested from the hobbes server
        if include_timeseries is True:
            extras = props.get('extras', [])
            if extras is not None and len(extras) > 0:
                attr_response = requests.get(
                    "http://cwn.casil.ucdavis.edu/network/extras?prmname=%s"
                    % props['prmname'])  #JSON attributes
            else:
                continue
            if attr_response.status_code != 200:
                raise HydraPluginError(
                    "A connection error has occurred with status code: %s"
                    % attr_response.status_code)
            extra_data = json.loads(attr_response.content)
            non_attrs = ['prmname', 'readme']
            for k, v in extra_data.items():
                if k in non_attrs:
                    continue
                else:
                    # NOTE(review): values shorter than 2 entries are
                    # skipped — presumably too short to be a timeseries.
                    if len(v) < 2:
                        continue
                    ts = self.parse_timeseries(v)
                    attr_id = self.attr_name_map[k].id
                    dataset = dict(
                        name=k,
                        value=json.dumps(ts),
                        type='timeseries',
                        dimension='dimensionless',
                        unit=None,
                    )
                    ra_id = None
                    for a in node_id_attr_map[node_id]:
                        if a.attr_id == attr_id:
                            ra_id = a.id
                            break
                    resource_scenario = dict(
                        resource_attr_id=ra_id,
                        attr_id=attr_id,
                        is_var='N',
                        value=dataset,
                    )
                    resource_scenarios.append(resource_scenario)
    scenario['resourcescenarios'] = resource_scenarios
    new_scenario = self.connection.call('add_scenario', {
        'network_id': self.network.id,
        'scen': scenario
    })
    self.scenario = new_scenario
    return new_scenario