def save_job_input_file(scenario):
    """
    Save scenario-level inputs to the scenario's folder and bundle it.

    Writes the scenario's metric configuration as a CSV job-input file
    inside the scenario folder, then zips the whole folder next to it
    as <folder>.zip.
    """
    # Resolve the on-disk folder for this scenario, creating it if needed
    scenarioFolder = scenario.getFolder()
    store.makeFolderSafely(scenarioFolder)
    expandPath = lambda x: os.path.join(scenarioFolder, x)
    # Persist the metric configuration so the job runner can read it back
    metricConfiguration = scenario.input['metric configuration']
    metric.saveMetricsConfigurationCSV(expandPath('metrics-job-input'), metricConfiguration)
    # Bundle the folder for transport to the job queue / download
    store.zipFolder(scenarioFolder + '.zip', scenarioFolder)
def run(self):
    """
    Execute the scenario end to end: register demographics, apply the
    metric model, build the network, update the metric, then save and
    bundle all outputs into the scenario folder.

    Side effects: writes SHP/CSV output files, zips the scenario folder,
    sets self.output, and commits the database session.
    """
    # Prepare
    scenarioInput = self.input
    scenarioFolder = self.getFolder()
    expandPath = lambda x: os.path.join(scenarioFolder, x)
    # Register demographics
    print 'Registering demographics'
    nodesPath = expandPath('nodes')
    targetPath = self.getDatasetPath()
    sourcePath = expandPath(scenarioInput['demographic file name'])
    datasetStore = dataset_store.create(targetPath, sourcePath)
    datasetStore.saveNodesSHP(nodesPath)
    datasetStore.saveNodesCSV(nodesPath)
    # Apply metric
    print 'Applying metric'
    metricModel = metric.getModel(scenarioInput['metric model name'])
    metricConfiguration = scenarioInput['metric configuration']
    metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration)
    # Build network
    print 'Building network'
    networkModel = network.getModel(scenarioInput['network model name'])
    networkConfiguration = scenarioInput['network configuration']
    networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration)
    # Update metric (re-run after the network is built, since connectivity
    # can change metric values)
    print 'Updating metric'
    metricValueByOptionBySection = datasetStore.updateMetric(metricModel, metricValueByOptionBySection)
    # Save output
    print 'Saving output'
    metric.saveMetricsCSV(expandPath('metrics-global'), metricModel, metricValueByOptionBySection)
    datasetStore.saveMetricsCSV(expandPath('metrics-local'), metricModel)
    datasetStore.saveSegmentsSHP(expandPath('networks-existing'), is_existing=True)
    datasetStore.saveSegmentsSHP(expandPath('networks-proposed'), is_existing=False)
    # Bundle
    store.zipFolder(scenarioFolder + '.zip', scenarioFolder)
    # Validate
    self.validateParameters()
    # Save output
    self.output = {
        'variables': {
            'node': dict((str(x.id), dict(input=x.input, output=x.output)) for x in datasetStore.cycleNodes()),
            'metric': metricValueByOptionBySection,
            'network': networkValueByOptionBySection,
        },
        'statistics': {
            'node': datasetStore.getNodeStatistics(),
            'metric': datasetStore.getMetricStatistics(),
            'network': datasetStore.getNetworkStatistics(),
        },
        'warnings': store.popWarnings(self.id),
    }
    # Commit
    Session.commit()
def create(self):
    """
    POST /scenarios: Create a new item.

    Requires a logged-in person.  The demographic database either comes
    from an existing scenario (demographicDatabase_h) or from an
    uploaded file (demographicDatabase).  On success, stores the
    scenario input, zips the scenario folder and redirects to the
    scenario page.
    """
    # Initialize: require an authenticated person
    personID = h.getPersonID()
    if not personID:
        return redirect(url('person_login', targetURL=h.encodeURL(h.url('new_scenario'))))
    # Load form fields, falling back to safe defaults on bad input
    try:
        demographicDatabase_h = int(request.POST.get('demographicDatabase_h', 0))
    except ValueError:
        demographicDatabase_h = 0
    if not demographicDatabase_h and 'demographicDatabase' not in request.POST:
        return cjson.encode(dict(isOk=0, message='The demographicDatabase field is required'))
    scenarioName = request.POST.get('scenarioName') or 'Untitled'
    try:
        scenarioScope = int(request.POST.get('scenarioScope', model.scopePrivate))
    except ValueError:
        scenarioScope = model.scopePrivate
    metricModelName = request.POST.get('metricModelName', metric.getModelNames()[0])
    networkModelName = request.POST.get('networkModelName', network.getModelNames()[0])
    callbackURL = request.POST.get('callbackURL')
    # Create scenario record first so scenario.getFolder() has an id to use
    scenario = model.Scenario(personID, scenarioName, scenarioScope)
    Session.add(scenario)
    Session.commit()
    scenarioFolder = scenario.getFolder()
    # Start from a clean folder in case a previous run left files behind
    if os.path.exists(scenarioFolder):
        shutil.rmtree(scenarioFolder)
    store.makeFolderSafely(scenarioFolder)
    # If the user is using an existing demographicDatabase,
    if demographicDatabase_h:
        # Copy source in case it is deleted
        sourceScenario = Session.query(model.Scenario).get(demographicDatabase_h)
        sourceScenarioFolder = sourceScenario.getFolder()
        demographicFileName = sourceScenario.input['demographic file name']
        demographicPath = os.path.join(scenarioFolder, demographicFileName)
        shutil.copyfile(os.path.join(sourceScenarioFolder, demographicFileName), demographicPath)
    # If the user is uploading a new demographicDatabase,
    else:
        # Save original demographicDatabase in case the user wants it later
        demographicDatabase = request.POST['demographicDatabase']
        demographicFileExtension = os.path.splitext(demographicDatabase.filename)[1]
        demographicFileName = 'demographics' + demographicFileExtension
        demographicPath = os.path.join(scenarioFolder, demographicFileName)
        # Close the destination file explicitly: the original passed an
        # anonymous open(...) to copyfileobj and relied on garbage
        # collection to flush/close it before the folder was zipped
        demographicFile = open(demographicPath, 'wb')
        try:
            shutil.copyfileobj(demographicDatabase.file, demographicFile)
        finally:
            demographicFile.close()
        demographicDatabase.file.close()
    # Store input
    configurationByName = extractConfigurationByName(request.POST, scenarioFolder)
    scenario.input = {
        'demographic file name': str(demographicFileName),
        'metric model name': metricModelName,
        'metric configuration': configurationByName.get('metric', {}),
        'network model name': networkModelName,
        'network configuration': configurationByName.get('network', {}),
        'callback url': callbackURL,
        'host url': request.host_url,
    }
    Session.commit()
    store.zipFolder(scenarioFolder + '.zip', scenarioFolder)
    # Redirect
    redirect(url('scenario', id=scenario.id))
def run(self): # Prepare scenarioInput = self.input scenarioFolder = self.getFolder() expandPath = lambda x: os.path.join(scenarioFolder, x) # Setup status reporting from time import localtime, strftime time_format = "%Y-%m-%d %H:%M:%S" # Register demographics Job.log("Registering demographics") print "%s Registering demographics" % strftime(time_format, localtime()) nodesPath = expandPath('nodes') targetPath = self.getDatasetPath() sourcePath = expandPath(scenarioInput['demographic file name']) datasetStore = dataset_store.create(targetPath, sourcePath) datasetStore.saveNodesSHP(nodesPath) datasetStore.saveNodesCSV(nodesPath) # Apply metric Job.log("Applying metric") print "%s Applying metric" % strftime(time_format, localtime()) metricModel = metric.getModel(scenarioInput['metric model name']) metricConfiguration = scenarioInput['metric configuration'] metricValueByOptionBySection = datasetStore.applyMetric(metricModel, metricConfiguration) # Build network Job.log("Building network") print "%s Building network" % strftime(time_format, localtime()) networkModel = network.getModel(scenarioInput['network model name']) networkConfiguration = scenarioInput['network configuration'] networkValueByOptionBySection = datasetStore.buildNetwork(networkModel, networkConfiguration, jobLogger=Job) # Update metric Job.log("Updating metric") print "%s Updating metric" % strftime(time_format, localtime()) metricValueByOptionBySection = datasetStore.updateMetric(metricModel, metricValueByOptionBySection) # Save output Job.log("Saving output") print "%s Saving output" % strftime(time_format, localtime()) metric.saveMetricsConfigurationCSV(expandPath('metrics-job-input'), metricConfiguration) metric.saveMetricsCSV(expandPath('metrics-global'), metricModel, metricValueByOptionBySection) datasetStore.saveMetricsCSV(expandPath('metrics-local'), metricModel) datasetStore.saveSegmentsSHP(expandPath('networks-existing'), is_existing=True) 
datasetStore.saveSegmentsSHP(expandPath('networks-proposed'), is_existing=False) # Bundle store.zipFolder(scenarioFolder + '.zip', scenarioFolder) # Validate self.validateParameters() # Save output self.output = { 'variables': { 'node': dict((str(x.id), dict(input=x.input, output=x.output)) for x in datasetStore.cycleNodes()), 'metric': metricValueByOptionBySection, 'network': networkValueByOptionBySection, }, 'statistics': { 'node': datasetStore.getNodeStatistics(), 'metric': datasetStore.getMetricStatistics(), 'network': datasetStore.getNetworkStatistics(), }, 'warnings': store.popWarnings(self.id), } # Commit Session.commit()
# Claim the service port as a crude single-instance guard: if the bind
# fails, another instance is presumably running, so exit quietly
try:
    localSocket.bind(('', servicePort))
except socket.error:
    # print 'Either another instance of %s is running or port %s is in use' % (__file__, port)
    sys.exit(1)
# Load AMQP settings
amqpHost, amqpUsername, amqpPassword = getValues('amqp', ['host', 'username', 'password'])
incomingQueue, incomingExchange, incomingKey = getValues('amqp incoming', ['queue', 'exchange', 'key'])
# Connect to AMQP server
connection = amqp.Connection(host=amqpHost, userid=amqpUsername, password=amqpPassword)
channel = connection.channel()
# For each new scenario,
for scenario in Session.query(model.Scenario).filter(model.Scenario.status == model.statusNew):
    # Mark scenario as pending so a concurrent run does not pick it up again
    scenario.status = model.statusPending
    Session.commit()
    # Pack incoming message: (id, input, zipped scenario folder)
    scenarioFolder = scenario.getFolder()
    scenarioPath = scenarioFolder + '.zip'
    if not os.path.exists(scenarioPath):
        store.zipFolder(scenarioPath, scenarioFolder)
    # Read the archive, closing the handle explicitly (the original
    # open(...).read() leaked a file handle per loop iteration)
    scenarioFile = open(scenarioPath, 'rb')
    try:
        scenarioData = scenarioFile.read()
    finally:
        scenarioFile.close()
    incomingPack = scenario.id, scenario.input, scenarioData
    # Send incoming message
    incomingMessage = amqp.Message(pickle.dumps(incomingPack))
    incomingMessage.properties['delivery_mode'] = 2  # persistent delivery
    channel.basic_publish(incomingMessage, exchange=incomingExchange, routing_key=incomingKey)
# Close channel
channel.close()
# Close connection
connection.close()