Example #1
    def test_sasi_model(self):
        dao = SASI_SqlAlchemyDAO(session=self.session)
        sasi_ingestor = SASI_Ingestor(dao=dao)
        sasi_ingestor.ingest(data_dir=self.data_dir)
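        # Query the ingested model inputs.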
        parameters = dao.query('__ModelParameters').one()
        cells = dao.query('__Cell').all()
        substrates = dao.query('__Substrate').all()
        features = dao.query('__Feature').all()
        gears = dao.query('__Gear').all()
        vas = dao.query('__VA').all()
        efforts = dao.query('__Effort').all()

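        # Build tau/omega parameter lookups (t_1..t_3, w_1..w_3) from the model parameters.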
        taus = {}
        omegas = {}
        for i in range(1,4):
            taus[i] = getattr(parameters, "t_%s" % i)
            omegas[i] = getattr(parameters, "w_%s" % i)

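        # Build the model from the ingested parameters and time a full run.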
        s = time.time()
        m = SASI_Model(
            t0=parameters.time_start,
            tf=parameters.time_end,
            dt=parameters.time_step,
            taus=taus,
            omegas=omegas,
            dao=dao,
        )
        m.run(batch_size=100)
        e = time.time()
        print "t: ", e - s
Example #2
    def test_sasi_ingestor(self):

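        # Use an in-memory database: H2 when running under Jython, SQLite otherwise.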
        if platform.system() == 'Java':
            db_uri = 'h2+zxjdbc:///mem:'
        else:
            db_uri = 'sqlite://'
        engine = create_engine(db_uri)
        connection = engine.connect()
        session = sessionmaker()(bind=connection)

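        # Generate a minimal test data directory covering a single time step.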
        self.data_dir = self.generate_data_dir(
            time_start=0,
            time_end=1,
            time_step=1,
        )

        dao = SASI_SqlAlchemyDAO(session=session)
        sasi_ingestor = SASI_Ingestor(
            data_dir=self.data_dir, 
            dao=dao,
            hash_cell_size=8,
        )
        sasi_ingestor.ingest()

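        # Check that the expected substrates, energies, feature categories, and features were ingested.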
        substrate_ids = [s.id for s in dao.query('__Substrate').all()]
        self.assertEquals(['S1', 'S2'], sorted(substrate_ids))

        energy_ids = [e.id for e in dao.query('__Energy').all()]
        self.assertEquals(['High', 'Low'], sorted(energy_ids))

        fcat_ids = [fc.id for fc in dao.query('__FeatureCategory').all()]
        self.assertEquals(['FC1', 'FC2'], sorted(fcat_ids))

        feature_ids = [f.id for f in dao.query('__Feature').all()]
        self.assertEquals(['F1', 'F2', 'F3', 'F4'], sorted(feature_ids))

        habs = [h for h in dao.query('__Habitat').all()]

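        # Each cell's habitat composition should be split evenly across the four substrate/energy combinations.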
        cells = [c for c in dao.query('__Cell').all()]
        expected_composition = {
            ('S1', 'High'): .25,
            ('S1', 'Low'): .25,
            ('S2', 'High'): .25,
            ('S2', 'Low'): .25,
        }
        for c in cells:
            for key, v in c.habitat_composition.items():
                self.assertAlmostEquals(
                    expected_composition[key],
                    v
                )
Example #3
    def call(self):
        self.progress = 1
        self.message_logger.info("Starting...")

        # Create build dir.
        build_dir = tempfile.mkdtemp(prefix="rsBuild.")

        con = self.get_connection()
        session = sessionmaker()(bind=con)

        # If input_path is a file, assemble data dir.
        if os.path.isfile(self.input_path):
            data_dir = tempfile.mkdtemp(prefix="run_sasi.")
            with zipfile.ZipFile(self.input_path, 'r') as zfile:
                zfile.extractall(data_dir)
        else:
            data_dir = self.input_path

        # @TODO: add validation here?

        # Read in data.
        try:
            base_msg = "Ingesting..."
            ingest_logger = self.get_logger_for_stage('ingest', base_msg)
            self.message_logger.info(base_msg)
            dao = SASI_SqlAlchemyDAO(session=session)
            sasi_ingestor = SASI_Ingestor(
                data_dir=data_dir, dao=dao, logger=ingest_logger,
                config=self.config.get('ingest', {})
            )
            sasi_ingestor.ingest()
        except Exception as e:
            self.logger.exception("Error ingesting")
            raise e

        # Run the model.
        try:
            base_msg = "Running SASI model ..."
            run_model_logger = self.get_logger_for_stage('run_model', base_msg)
            self.message_logger.info(base_msg)
            run_model_config = self.config.get('run_model', {})
            parms = dao.query('__ModelParameters').one()

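            # Build tau/omega parameter lookups (t_0..t_3, w_0..w_3) from the model parameters.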
            taus = {}
            omegas = {}
            for i in range(0,4):
                taus[i] = getattr(parms, "t_%s" % i)
                omegas[i] = getattr(parms, "w_%s" % i)

            model_kwargs = {
                't0': parms.time_start,
                'tf': parms.time_end,
                'dt': parms.time_step,
                'effort_model': parms.effort_model,
                'taus': taus,
                'omegas': omegas,
                'dao': dao,
                'logger': run_model_logger,
                'result_fields': self.config.get('result_fields'),
            }

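            # Resolve run options; an 'auto' batch size is computed from the DAO.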
            run_kwargs = {}
            run_kwargs.update(run_model_config.get('run', {}))
            batch_size = run_kwargs.setdefault('batch_size', 20)
            if batch_size == 'auto':
                run_kwargs['batch_size'] = self.get_run_batch_size(dao)

            model_kwargs.update(run_model_config)
            m = SASI_Model(**model_kwargs)
            m.run(**run_kwargs)
        except Exception as e:
            self.logger.exception("Error running model: %s" % e)
            raise e

        # Generate metadata.
        try:
            base_msg = "Generating metadata..."
            metadata_logger = self.get_logger_for_stage('metadata', base_msg)
            self.message_logger.info(base_msg)
            metadata_dir = os.path.join(build_dir, "metadata")
            os.mkdir(metadata_dir)
            sasipedia.generate_sasipedia(targetDir=metadata_dir, dataDir=data_dir)
        except Exception as e:
            self.logger.exception("Error generating metadata.")
            raise e

        # Generate output package.
        try:
            output_config = self.config.get('output', {})
            base_msg = "Generating output package..."
            output_package_logger = self.get_logger_for_stage(
                'output_package', base_msg)
            self.message_logger.info(base_msg)

            self.create_output_package(
                data_dir=data_dir, 
                metadata_dir=metadata_dir,
                dao=dao, 
                output_format='georefine',
                logger=output_package_logger,
                batch_size=output_config.get('batch_size', 'auto'),
                output_file=self.output_file,
            )
        except Exception as e:
            self.logger.exception("Error generating georefine package.")
            raise e

        shutil.rmtree(build_dir)

        self.progress = 100
        self.message_logger.info("SASI Run completed, output file is:'%s'" % (
            self.output_file))
        self.status = 'resolved'