Ejemplo n.º 1
0
    def test_sasi_model(self):
        """Ingest fixture data, run the SASI model, and print the run time.

        Acts as a smoke test: it verifies that ingestion populates every
        entity type and that a model run completes without error.
        """
        dao = SASI_SqlAlchemyDAO(session=self.session)
        sasi_ingestor = SASI_Ingestor(dao=dao)
        sasi_ingestor.ingest(data_dir=self.data_dir)
        parameters = dao.query('__ModelParameters').one()
        # Smoke-test queries: confirm each ingested entity type is readable.
        cells = dao.query('__Cell').all()
        substrates = dao.query('__Substrate').all()
        features = dao.query('__Feature').all()
        gears = dao.query('__Gear').all()
        vas = dao.query('__VA').all()
        efforts = dao.query('__Effort').all()

        # Vulnerability classes are numbered 1-3; parameters expose them as
        # attributes t_1..t_3 and w_1..w_3.
        taus = {}
        omegas = {}
        for i in range(1, 4):
            taus[i] = getattr(parameters, "t_%s" % i)
            omegas[i] = getattr(parameters, "w_%s" % i)

        s = time.time()
        m = SASI_Model(
            t0=parameters.time_start,
            tf=parameters.time_end,
            dt=parameters.time_step,
            taus=taus,
            omegas=omegas,
            dao=dao,
        )
        m.run(batch_size=100)
        e = time.time()
        # print() works under both Python 2 and 3; the original
        # `print "t: ", ...` statement is a SyntaxError on Python 3.
        print("t: %s" % (e - s))
Ejemplo n.º 2
0
    def test_sasi_ingestor(self):
        """Ingest a generated data dir and verify the persisted entities."""

        # Jython needs the H2 in-memory backend; CPython uses SQLite.
        if platform.system() == 'Java':
            db_uri = 'h2+zxjdbc:///mem:'
        else:
            db_uri = 'sqlite://'
        engine = create_engine(db_uri)
        connection = engine.connect()
        session = sessionmaker()(bind=connection)

        self.data_dir = self.generate_data_dir(
            time_start=0,
            time_end=1,
            time_step=1,
        )

        dao = SASI_SqlAlchemyDAO(session=session)
        sasi_ingestor = SASI_Ingestor(
            data_dir=self.data_dir,
            dao=dao,
            hash_cell_size=8,
        )
        sasi_ingestor.ingest()

        # assertEqual/assertAlmostEqual replace the deprecated
        # assertEquals/assertAlmostEquals aliases.
        substrate_ids = [s.id for s in dao.query('__Substrate').all()]
        self.assertEqual(['S1', 'S2'], sorted(substrate_ids))

        energy_ids = [e.id for e in dao.query('__Energy').all()]
        self.assertEqual(['High', 'Low'], sorted(energy_ids))

        fcat_ids = [fc.id for fc in dao.query('__FeatureCategory').all()]
        self.assertEqual(['FC1', 'FC2'], sorted(fcat_ids))

        feature_ids = [f.id for f in dao.query('__Feature').all()]
        self.assertEqual(['F1', 'F2', 'F3', 'F4'], sorted(feature_ids))

        # Smoke-test that habitats were ingested without error.
        habs = dao.query('__Habitat').all()

        cells = dao.query('__Cell').all()
        # Each cell should be split evenly across the four
        # (substrate, energy) habitat combinations.
        expected_composition = {
            ('S1', 'High'): .25,
            ('S1', 'Low'): .25,
            ('S2', 'High'): .25,
            ('S2', 'Low'): .25,
        }
        for c in cells:
            for key, v in c.habitat_composition.items():
                self.assertAlmostEqual(expected_composition[key], v)
Ejemplo n.º 3
0
    def test_sasi_ingestor(self):
        """Ingest a generated data dir and verify the persisted entities."""

        # Jython needs the H2 in-memory backend; CPython uses SQLite.
        if platform.system() == 'Java':
            db_uri = 'h2+zxjdbc:///mem:'
        else:
            db_uri = 'sqlite://'
        engine = create_engine(db_uri)
        connection = engine.connect()
        session = sessionmaker()(bind=connection)

        self.data_dir = self.generate_data_dir(
            time_start=0,
            time_end=1,
            time_step=1,
        )

        dao = SASI_SqlAlchemyDAO(session=session)
        sasi_ingestor = SASI_Ingestor(
            data_dir=self.data_dir,
            dao=dao,
            hash_cell_size=8,
        )
        sasi_ingestor.ingest()

        # assertEqual/assertAlmostEqual replace the deprecated
        # assertEquals/assertAlmostEquals aliases.
        substrate_ids = [s.id for s in dao.query('__Substrate').all()]
        self.assertEqual(['S1', 'S2'], sorted(substrate_ids))

        energy_ids = [e.id for e in dao.query('__Energy').all()]
        self.assertEqual(['High', 'Low'], sorted(energy_ids))

        fcat_ids = [fc.id for fc in dao.query('__FeatureCategory').all()]
        self.assertEqual(['FC1', 'FC2'], sorted(fcat_ids))

        feature_ids = [f.id for f in dao.query('__Feature').all()]
        self.assertEqual(['F1', 'F2', 'F3', 'F4'], sorted(feature_ids))

        # Smoke-test that habitats were ingested without error.
        habs = dao.query('__Habitat').all()

        cells = dao.query('__Cell').all()
        # Each cell should be split evenly across the four
        # (substrate, energy) habitat combinations.
        expected_composition = {
            ('S1', 'High'): .25,
            ('S1', 'Low'): .25,
            ('S2', 'High'): .25,
            ('S2', 'Low'): .25,
        }
        for c in cells:
            for key, v in c.habitat_composition.items():
                self.assertAlmostEqual(expected_composition[key], v)
Ejemplo n.º 4
0
    def test_sasi_model(self):
        """Run the SASI model on hand-built fixtures and verify every result.

        Builds one cell with a 50/50 S1-High / S1-Low habitat split, two
        features, gears that exercise the invalid-gear and depth-limit skip
        paths, and efforts for t=0..2 (with t=2 having empty hours/value),
        then compares the generated result rows against expected values.
        """
        dao = SASI_SqlAlchemyDAO(session=self.session)

        FeatureCategory = dao.schema['sources']['FeatureCategory']
        feature_categories = [
            FeatureCategory(id='FC1'),
            FeatureCategory(id='FC2'),
        ]
        dao.save_all(feature_categories)

        Feature = dao.schema['sources']['Feature']
        features = [
            Feature(id='F1', category='FC1'),
            Feature(id='F2', category='FC2'),
        ]
        dao.save_all(features)

        # 'GTooDeep'/'GTooShallow' exist only so that efforts using them are
        # skipped by the depth-limit checks (the cell sits at z=500).
        Gear = dao.schema['sources']['Gear']
        gears = [
            Gear(id='G1', min_depth=0, max_depth=1000),
            Gear(id='GTooDeep', min_depth=5000),
            Gear(id='GTooShallow', max_depth=0),
        ]
        dao.save_all(gears)

        Cell = dao.schema['sources']['Cell']
        cells = [
            Cell(
                id=0,
                area=100.0,
                habitat_composition={
                    ('S1', 'Low'): .5,
                    ('S1', 'High'): .5,
                },
                z=500,
            ),
        ]
        dao.save_all(cells)

        # One VA per (feature, energy): High habitats get s=1/r=1, Low
        # habitats get s=2/r=2.
        VA = dao.schema['sources']['VA']
        vas = [
            VA(
                substrate_id='S1',
                energy_id=energy_id,
                gear_id='G1',
                feature_id=feature_id,
                s=s,
                r=r,
            )
            for feature_id in ['F1', 'F2']
            for (energy_id, s, r) in [('High', 1, 1), ('Low', 2, 2)]
        ]
        dao.save_all(vas)

        Effort = dao.schema['sources']['Effort']
        efforts = [
            Effort(
                time=0,
                cell_id=0,
                gear_id='G1',
                a=100.0,
                hours_fished=100.0,
                value=100.0
            ),
            # Invalid gear: should be skipped.
            Effort(
                time=0,
                cell_id=0,
                gear_id='G99',
                a=100.0,
                hours_fished=100.0,
                value=100.0
            ),
            # Out of depth limits, should be skipped.
            Effort(
                time=0,
                cell_id=0,
                gear_id='GTooDeep',
                a=100.0,
                hours_fished=100.0,
                value=100.0
            ),
            # Out of depth limits, should be skipped.
            Effort(
                time=0,
                cell_id=0,
                gear_id='GTooShallow',
                a=100.0,
                hours_fished=100.0,
                value=100.0
            ),
            Effort(
                time=1,
                cell_id=0,
                gear_id='G1',
                a=100.0,
                hours_fished=100.0,
                value=100.0
            ),
            # Test empty hours fished, value.
            Effort(
                time=2,
                cell_id=0,
                gear_id='G1',
                a=100.0,
                hours_fished=None,
                value=None,
            ),
        ]
        dao.save_all(efforts)

        taus = {
            1: 1,
            2: 2,
            3: 4,
        }

        omegas = {
            1: .25,
            2: .5,
            3: 1.0,
        }

        m = SASI_Model(
            t0=0,
            tf=2,
            dt=1,
            taus=taus,
            omegas=omegas,
            effort_model='realized',
            dao=dao,
        )
        m.run(batch_size=100)

        # Expected per-timestep values for each habitat energy class; every
        # row applies to both features, since F1 and F2 share identical VAs.
        # Column order: (t, energy_id, x, y, z, znet, hours_fished, value).
        expected_rows = [
            # t=0: s=1, r=1 (High) and s=2, r=2 (Low).
            (0, 'High', 0.0, 6.25, -6.25, -6.25, 25.0, 25.0),
            (0, 'Low', 0.0, 12.5, -12.5, -12.5, 25.0, 25.0),
            # t=1.
            (1, 'High', 6.25, 6.25, 0.0, -6.25, 25.0, 25.0),
            (1, 'Low', 6.25, 12.5, -6.25, -18.75, 25.0, 25.0),
            # t=2: no effort recorded (hours_fished/value were None).
            (2, 'High', 6.25, 6.25, 0.0, -6.25, 0.0, 0.0),
            (2, 'Low', 12.5, 12.5, 0.0, -18.75, 0.0, 0.0),
        ]
        expected_result_dicts = [
            {
                't': t,
                'cell_id': 0,
                'energy_id': energy_id,
                'substrate_id': 'S1',
                'feature_id': feature_id,
                'gear_id': 'G1',
                'a': 25.0,
                'x': x,
                'y': y,
                'z': z,
                'znet': znet,
                'hours_fished': hours_fished,
                'value': value,
            }
            for (t, energy_id, x, y, z, znet, hours_fished,
                 value) in expected_rows
            for feature_id in ['F1', 'F2']
        ]

        actual_results = dao.query('__Result').all()
        actual_result_dicts = self.results_to_dicts(actual_results)

        # Sort both sides into a canonical order before comparing pairwise.
        self.sort_result_dicts(actual_result_dicts)
        self.sort_result_dicts(expected_result_dicts)

        self.assertEqual(len(expected_result_dicts), len(actual_result_dicts))

        for expected, actual in zip(expected_result_dicts,
                                    actual_result_dicts):
            self.assertEqual(expected, actual)
Ejemplo n.º 5
0
    def call(self):
        """Run a full SASI job: ingest data, run the model, generate
        metadata, and assemble the output package.

        Progress/status attributes and the message logger are updated as
        each stage runs; stage failures are logged and re-raised.
        """
        self.progress = 1
        self.message_logger.info("Starting...")

        # Create build dir.
        build_dir = tempfile.mkdtemp(prefix="rsBuild.")

        con = self.get_connection()
        session = sessionmaker()(bind=con)

        # If input_path is a file, assume it is a zip archive and unpack it
        # into a temporary data dir; otherwise treat it as a data dir.
        # NOTE(review): data_dir/build_dir are not cleaned up if a stage
        # fails below — consider a try/finally if that leak matters.
        if os.path.isfile(self.input_path):
            data_dir = tempfile.mkdtemp(prefix="run_sasi.")
            with zipfile.ZipFile(self.input_path, 'r') as zfile:
                zfile.extractall(data_dir)
        else:
            data_dir = self.input_path

        # @TODO: add validation here?

        # Read in data.
        try:
            base_msg = "Ingesting..."
            ingest_logger = self.get_logger_for_stage('ingest', base_msg)
            self.message_logger.info(base_msg)
            dao = SASI_SqlAlchemyDAO(session=session)
            sasi_ingestor = SASI_Ingestor(
                data_dir=data_dir, dao=dao, logger=ingest_logger,
                config=self.config.get('ingest', {})
            )
            sasi_ingestor.ingest()
        except Exception:
            self.logger.exception("Error ingesting")
            # Bare `raise` preserves the original traceback; `raise e`
            # would reset it.
            raise

        # Run the model.
        try:
            base_msg = "Running SASI model ..."
            run_model_logger = self.get_logger_for_stage('run_model', base_msg)
            self.message_logger.info(base_msg)
            run_model_config = self.config.get('run_model', {})
            parms = dao.query('__ModelParameters').one()

            # Vulnerability classes are numbered 1-3 (attributes t_1..t_3 /
            # w_1..w_3); range(0, 4) would also try to read the nonexistent
            # t_0/w_0 parameters.
            taus = {}
            omegas = {}
            for i in range(1, 4):
                taus[i] = getattr(parms, "t_%s" % i)
                omegas[i] = getattr(parms, "w_%s" % i)

            model_kwargs = {
                't0': parms.time_start,
                'tf': parms.time_end,
                'dt': parms.time_step,
                'effort_model': parms.effort_model,
                'taus': taus,
                'omegas': omegas,
                'dao': dao,
                'logger': run_model_logger,
                'result_fields': self.config.get('result_fields'),
            }

            run_kwargs = {}
            run_kwargs.update(run_model_config.get('run', {}))
            # 'auto' lets the batch size be derived from the dataset size.
            batch_size = run_kwargs.setdefault('batch_size', 20)
            if batch_size == 'auto':
                run_kwargs['batch_size'] = self.get_run_batch_size(dao)

            model_kwargs.update(run_model_config)
            m = SASI_Model(**model_kwargs)
            m.run(**run_kwargs)
        except Exception as e:
            self.logger.exception("Error running model: %s" % e)
            raise

        # Generate metadata.
        try:
            base_msg = "Generating metadata..."
            metadata_logger = self.get_logger_for_stage('metadata', base_msg)
            self.message_logger.info(base_msg)
            metadata_dir = os.path.join(build_dir, "metadata")
            os.mkdir(metadata_dir)
            sasipedia.generate_sasipedia(targetDir=metadata_dir, dataDir=data_dir)
        except Exception:
            self.logger.exception("Error generating metadata.")
            raise

        # Generate output package.
        try:
            output_config = self.config.get('output', {})
            base_msg = "Generating output package..."
            output_package_logger = self.get_logger_for_stage(
                'output_package', base_msg)
            self.message_logger.info(base_msg)

            self.create_output_package(
                data_dir=data_dir,
                metadata_dir=metadata_dir,
                dao=dao,
                output_format='georefine',
                logger=output_package_logger,
                batch_size=output_config.get('batch_size', 'auto'),
                output_file=self.output_file,
            )
        except Exception:
            self.logger.exception("Error generating georefine package.")
            raise

        shutil.rmtree(build_dir)

        self.progress = 100
        self.message_logger.info("SASI Run completed, output file is:'%s'" % (
            self.output_file))
        self.status = 'resolved'