def _run_app(self, config):
    """Run the application described by *config* against its dataset.

    Parameters:
    - config: parsed ConfigParser with 'global_settings', optional
      'application_config', and 'inputs' sections.

    Returns:
    - dict mapping output table name to the file name holding its results.
    """
    # Get application.
    appName = config['global_settings']['application']
    klass = get_algorithm_class(appName)

    # Check which data set we're using.
    dataset_id = int(config['global_settings']['dataset_id'])
    dataset = models.SensorIngest.objects.get(pk=dataset_id)

    # Get application parameters.
    kwargs = {}
    if config.has_section('application_config'):
        for arg, str_val in config['application_config'].items():
            # SECURITY: eval() executes arbitrary Python from the config
            # file. Only run trusted configs; consider ast.literal_eval.
            kwargs[arg] = eval(str_val)

    # Get application inputs: each group maps to a whitespace-separated
    # list of topics.
    inputs = config['inputs']
    topic_map = {}
    for group, topics in inputs.items():
        topic_map[group] = topics.split()

    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    analysis = models.Analysis(
        added=now, started=now, dataset=dataset,
        application=appName,
        debug=True,
        project_id=dataset.project_id,
        configuration={
            'parameters': kwargs,
            'inputs': topic_map
        },
        name='cli: {}, dataset {}'.format(appName, dataset_id))
    analysis.save()

    db_input = DatabaseInput(dataset.map.id, topic_map, dataset_id)
    output_format = klass.output_format(db_input)
    file_output = DatabaseOutputFile(analysis, output_format)

    # Execute the application; always stamp the end time, even on failure.
    app = klass(db_input, file_output, **kwargs)
    try:
        app.run_application()
        for report in klass.reports(output_format):
            print(report)
    finally:
        analysis.ended = datetime.datetime.utcnow().replace(tzinfo=utc)
        analysis.save()

    # Retrieve the map of tables to output CSVs from the application.
    actual_outputs = {}
    for tableName, output_file in app.out.file_table_map.items():
        actual_outputs[tableName] = output_file.name
    return actual_outputs
def handle(self, *args, verbosity=1, dry_run=False, **options):
    """Django management-command entry point: run an application from a
    configuration file named by ``args[0]``.

    Parameters:
    - args: positional arguments; args[0] is the config file path.
    - verbosity: int-like; values > 1 enable progress output.
    - dry_run: accepted for interface compatibility but currently unused.
    - options: remaining command options (ignored).
    """
    # Put off importing modules that access the database to allow
    # Django to magically install the plumbing first.
    from openeis.projects import models

    # Track the Analysis record so except/finally can safely skip the
    # bookkeeping when a failure happens before it is created.
    analysis = None
    try:
        verbosity = int(verbosity)
        config = ConfigParser()
        config.optionxform = str  # set config file to case sensitive
        config.read(args[0])

        application = config['global_settings']['application']
        klass = get_algorithm_class(application)

        dataset_id = int(config['global_settings']['dataset_id'])
        dataset = models.SensorIngest.objects.get(pk=dataset_id)

        # 'debug' is optional; default to False when absent.
        try:
            debug = config.getboolean('global_settings', 'debug')
        except NoOptionError:
            debug = False

        # Get application parameters.
        kwargs = {}
        if config.has_section('application_config'):
            for arg, str_val in config['application_config'].items():
                # SECURITY: eval() executes arbitrary Python from the
                # config file. Only run trusted configs; consider
                # ast.literal_eval.
                kwargs[arg] = eval(str_val)

        # Get application inputs: each group maps to a whitespace-
        # separated list of topics.
        topic_map = {}
        inputs = config['inputs']
        for group, topics in inputs.items():
            topic_map[group] = topics.split()

        now = datetime.utcnow().replace(tzinfo=utc)
        analysis = models.Analysis(
            added=now, started=now, dataset=dataset,
            application=application,
            debug=debug,
            project_id=dataset.project_id,
            configuration={
                'parameters': kwargs,
                'inputs': topic_map
            },
            name='cli: {}, dataset {}'.format(application, dataset_id))
        analysis.save()

        db_input = DatabaseInput(dataset.map.id, topic_map, dataset_id)
        output_format = klass.output_format(db_input)
        file_output = DatabaseOutputFile(analysis, output_format,
                                         console_output=True)

        if verbosity > 1:
            print('Running application:', application)
            if dataset_id is not None:
                print('- Data set id:', dataset_id)
            print('- Topic map:', topic_map)
            print('- Output format:', output_format)

        app = klass(db_input, file_output, **kwargs)
        app.run_application()

        analysis.reports = [
            serializers.ReportSerializer(report).data
            for report in klass.reports(output_format)
        ]
        for report in analysis.reports:
            print(report)
    except Exception:
        # Mark the run as failed, but only if the record was created
        # before the failure (previously this raised UnboundLocalError).
        if analysis is not None:
            analysis.status = "error"
        # TODO: log errors
        print(traceback.format_exc())
    finally:
        if analysis is not None:
            analysis.progress_percent = 100
            analysis.ended = datetime.utcnow().replace(tzinfo=utc)
            analysis.save()
def run_application(self, configFileName):
    """
    Runs the application with a given configuration file.

    Parameters:
    - configFileName: configuration file for application run

    Returns:
    - actual_outputs, dictionary, maps table name to file name of run results
    """
    # Note the overall process here follows that of method
    # openeis/projects/management/commands/runapplication.handle().

    # Read the configuration file.
    self.assertTrue(
        os.path.isfile(configFileName),
        msg='Cannot find configuration file "{}"'.format(configFileName)
    )
    config = ConfigParser()
    config.read(configFileName)

    # Get application.
    appName = config['global_settings']['application']
    klass = get_algorithm_class(appName)

    # Check which data set we're using.
    dataset_id = int(config['global_settings']['dataset_id'])
    dataset = models.SensorIngest.objects.get(pk=dataset_id)

    # Get application parameters.
    kwargs = {}
    if config.has_section('application_config'):
        for arg, str_val in config['application_config'].items():
            # SECURITY: eval() executes arbitrary Python from the config
            # file. Only run trusted configs; consider ast.literal_eval.
            kwargs[arg] = eval(str_val)

    # Get application inputs: each group maps to a whitespace-separated
    # list of topics.
    inputs = config['inputs']
    topic_map = {}
    for group, topics in inputs.items():
        topic_map[group] = topics.split()

    # Note: the Analysis is attached to the dataset's own project; the
    # previous hard-coded lookup of Project pk=1 was unused dead code.
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    analysis = models.Analysis(
        added=now, started=now, dataset=dataset,
        application=appName,
        debug=True,
        project_id=dataset.project_id,
        configuration={
            'parameters': kwargs,
            'inputs': topic_map
        },
        name='cli: {}, dataset {}'.format(appName, dataset_id)
    )
    analysis.save()

    db_input = DatabaseInput(dataset.map.id, topic_map, dataset_id)
    output_format = klass.output_format(db_input)
    file_output = DatabaseOutputFile(analysis, output_format)

    # Execute the application; always stamp the end time, even on failure.
    app = klass(db_input, file_output, **kwargs)
    try:
        app.run_application()
    finally:
        analysis.ended = datetime.datetime.utcnow().replace(tzinfo=utc)
        analysis.save()

    # Retrieve the map of tables to output CSVs from the application.
    actual_outputs = {}
    for tableName, output_file in app.out.file_table_map.items():
        actual_outputs[tableName] = output_file.name
    return actual_outputs