def call(self):
    """Run the full SASI pipeline.

    Stages, in order: unpack/locate the input data dir, ingest it into a
    SqlAlchemy-backed DAO, run the SASI model, generate sasipedia metadata,
    and assemble the final output package.  Progress and per-stage messages
    are reported through ``self.progress`` / ``self.message_logger``; on
    success ``self.status`` is set to ``'resolved'`` and the result is
    written to ``self.output_file``.

    Raises:
        Exception: any stage failure is logged (with traceback) and
            re-raised to the caller.
    """
    self.progress = 1
    self.message_logger.info("Starting...")

    # Scratch dir for metadata + packaging artifacts.
    # NOTE(review): only removed on the success path below — leaks if any
    # stage raises.  Possibly intentional (debugging aid); confirm.
    build_dir = tempfile.mkdtemp(prefix="rsBuild.")

    con = self.get_connection()
    session = sessionmaker()(bind=con)

    # If input_path is a file, assume it is a zip archive and unpack it
    # into a fresh temp dir; otherwise treat it as a ready-made data dir.
    # NOTE(review): the extracted temp dir is never cleaned up — confirm.
    if os.path.isfile(self.input_path):
        data_dir = tempfile.mkdtemp(prefix="run_sasi.")
        with zipfile.ZipFile(self.input_path, 'r') as zfile:
            zfile.extractall(data_dir)
    else:
        data_dir = self.input_path
    # @TODO: add validation here?

    # Stage 1: ingest the data dir into the DAO.
    try:
        base_msg = "Ingesting..."
        ingest_logger = self.get_logger_for_stage('ingest', base_msg)
        self.message_logger.info(base_msg)
        dao = SASI_SqlAlchemyDAO(session=session)
        sasi_ingestor = SASI_Ingestor(
            data_dir=data_dir,
            dao=dao,
            logger=ingest_logger,
            config=self.config.get('ingest', {}),
        )
        sasi_ingestor.ingest()
    except Exception:
        self.logger.exception("Error ingesting")
        # Bare raise preserves the original traceback ('raise e' would
        # re-raise from here and lose it under Python 2).
        raise

    # Stage 2: run the SASI model over the ingested data.
    try:
        base_msg = "Running SASI model ..."
        run_model_logger = self.get_logger_for_stage('run_model', base_msg)
        self.message_logger.info(base_msg)
        run_model_config = self.config.get('run_model', {})

        # Model parameters come from the ingested '__ModelParameters'
        # row; t_0..t_3 / w_0..w_3 are collected into tau/omega maps.
        parms = dao.query('__ModelParameters').one()
        taus = {}
        omegas = {}
        for i in range(4):
            taus[i] = getattr(parms, "t_%s" % i)
            omegas[i] = getattr(parms, "w_%s" % i)

        model_kwargs = {
            't0': parms.time_start,
            'tf': parms.time_end,
            'dt': parms.time_step,
            'effort_model': parms.effort_model,
            'taus': taus,
            'omegas': omegas,
            'dao': dao,
            'logger': run_model_logger,
            'result_fields': self.config.get('result_fields'),
        }

        run_kwargs = {}
        run_kwargs.update(run_model_config.get('run', {}))
        # Default batch size; 'auto' defers to a DAO-based heuristic.
        batch_size = run_kwargs.setdefault('batch_size', 20)
        if batch_size == 'auto':
            run_kwargs['batch_size'] = self.get_run_batch_size(dao)

        # NOTE(review): this also copies the 'run' sub-dict into the model
        # constructor kwargs — confirm SASI_Model tolerates/expects that.
        model_kwargs.update(run_model_config)
        m = SASI_Model(**model_kwargs)
        m.run(**run_kwargs)
    except Exception as e:
        self.logger.exception("Error running model: %s" % e)
        raise

    # Stage 3: generate sasipedia metadata under the build dir.
    try:
        base_msg = "Generating metadata..."
        metadata_logger = self.get_logger_for_stage('metadata', base_msg)
        self.message_logger.info(base_msg)
        metadata_dir = os.path.join(build_dir, "metadata")
        os.mkdir(metadata_dir)
        sasipedia.generate_sasipedia(targetDir=metadata_dir, dataDir=data_dir)
    except Exception:
        self.logger.exception("Error generating metadata.")
        raise

    # Stage 4: assemble the georefine output package.
    try:
        output_config = self.config.get('output', {})
        base_msg = "Generating output package..."
        output_package_logger = self.get_logger_for_stage(
            'output_package', base_msg)
        self.message_logger.info(base_msg)
        self.create_output_package(
            data_dir=data_dir,
            metadata_dir=metadata_dir,
            dao=dao,
            output_format='georefine',
            logger=output_package_logger,
            batch_size=output_config.get('batch_size', 'auto'),
            output_file=self.output_file,
        )
    except Exception:
        self.logger.exception("Error generating georefine package.")
        raise

    # Success: clean up scratch space and report completion.
    shutil.rmtree(build_dir)
    self.progress = 100
    self.message_logger.info("SASI Run completed, output file is:'%s'" % (
        self.output_file))
    self.status = 'resolved'
import argparse
import os
import shutil
import tempfile

import sasipedia


def main():
    """CLI entry point: generate a sasipedia site from a data directory.

    Takes a positional ``input`` data dir and an optional ``--output``/``-o``
    target dir; when no output is given, a fresh temp dir is created.
    """
    argparser = argparse.ArgumentParser()
    argparser.add_argument('input')
    argparser.add_argument('--output', '-o')
    args = argparser.parse_args()
    # Default to a fresh temp dir when no output path was given.
    if not args.output:
        args.output = tempfile.mkdtemp(prefix="sasipedia.")
    sasipedia.generate_sasipedia(targetDir=args.output, dataDir=args.input)
    # Single-argument print(...) is valid in both Python 2 and 3.
    print("Generated sasipedia at '%s'." % args.output)


# Guard so importing this module no longer parses sys.argv as a side effect.
if __name__ == '__main__':
    main()