Code example #1
    def setUp(self):
        self.job = self.setup_classic_job()
        session = get_db_session("reslt", "writer")
        for gmf in GMF_DATA():
            output_path = self.generate_output_path(self.job)
            hcw = GMFDBWriter(session, output_path, self.job.id)
            hcw.serialize(gmf)
Code example #2
File: __init__.py Project: johndouglas/openquake
def run_job(job_file, output_type):
    """Given a job_file, run the job."""

    a_job = Job.from_file(job_file, output_type)
    is_job_valid = a_job.is_valid()

    if is_job_valid[0]:
        a_job.set_status('running')

        try:
            a_job.launch()
        except sqlalchemy.exc.SQLAlchemyError:
            # Try to clean up the session so we get a chance to update
            # the job record without further errors.
            session = get_db_session("reslt", "writer")
            if session.is_active:
                session.rollback()

            a_job.set_status('failed')

            raise
        except:
            a_job.set_status('failed')

            raise
        else:
            a_job.set_status('succeeded')
    else:
        a_job.set_status('failed')

        LOG.critical("The job configuration is inconsistent:")

        for error_message in is_job_valid[1]:
            LOG.critical("   >>> %s" % error_message)
Code example #3
File: risk.py Project: johndouglas/openquake
def create_loss_map_writer(job_id, serialize_to, nrml_path, deterministic):
    """Create a loss map writer observing the settings in the config file.

    :param job_id: the id of the job the curve belongs to.
    :type job_id: int
    :param serialize_to: where to serialize
    :type serialize_to: list of strings. Permitted values: 'db', 'xml'.
    :param nrml_path: the full path of the XML/NRML representation of the
        loss map.
    :type nrml_path: string
    :param deterministic: Whether the loss map is deterministic (True) or
        non-deterministic (False)
    :type deterministic: boolean
    :returns: None or an instance of
        :py:class:`output.risk.LossMapXMLWriter` or
        :py:class:`output.risk.LossMapDBWriter`
    """
    writers = []

    if 'db' in serialize_to:
        writers.append(LossMapDBWriter(get_db_session("reslt", "writer"),
                                       nrml_path,
                                       job_id))

    if 'xml' in serialize_to:
        if deterministic:
            writers.append(LossMapXMLWriter(nrml_path))
        else:
            # No XML schema for non-deterministic maps yet (see bug 805434)
            pass

    return writer.compose_writers(writers)
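A minimal usage sketch for this factory, assuming a job with id 42 and a scratch output path (both placeholders); the arguments follow the docstring above:

    # Serialize a deterministic loss map to both the db and XML.
    writer = create_loss_map_writer(
        job_id=42, serialize_to=['db', 'xml'],
        nrml_path='/tmp/loss-map.xml', deterministic=True)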
Code example #4
    def setUp(self):
        self.job = self.setup_classic_job()
        self.session = get_db_session("reslt", "writer")
        output_path = self.generate_output_path(self.job)
        self.display_name = os.path.basename(output_path)

        self.writer = GMFDBWriter(self.session, output_path, self.job.id)
        self.reader = GMFDBReader(self.session)
Code example #5
File: __init__.py Project: johndouglas/openquake
    def get_status_from_db(job_id):
        """
        Get the status of the database record belonging to job ``job_id``.

        :returns: one of the strings 'pending', 'running', 'succeeded',
            'failed'.
        """
        session = get_db_session("reslt", "reader")
        [status] = session.query(OqJob.status).filter(OqJob.id == job_id).one()
        return status
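Because this returns one of four status strings, callers can poll it to wait for a job to finish; a minimal sketch, assuming job_id is in scope (the sleep interval is arbitrary):

    import time

    # Poll the db-backed status until the job either succeeds or fails.
    while Job.get_status_from_db(job_id) in ('pending', 'running'):
        time.sleep(5)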
Code example #6
    def _gmf_db_list(self, job_id):  # pylint: disable=R0201
        """Returns a list of the output IDs of all computed GMFs"""
        session = get_db_session("reslt", "reader")

        ids = session.query(models.Output.id) \
            .filter(models.Output.oq_job_id == job_id) \
            .filter(models.Output.output_type == 'gmf')

        return [row[0] for row in ids]
Code example #7
File: job_unittest.py Project: johndouglas/openquake
    def test_is_job_completed(self):
        job_id = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db').job_id
        session = get_db_session("reslt", "writer")
        pairs = [('pending', False), ('running', False),
                 ('succeeded', True), ('failed', True)]
        for status, is_completed in pairs:
            session.query(OqJob).update({'status': status})
            session.commit()
            self.assertEqual(Job.is_job_completed(job_id), is_completed)
Code example #8
File: __init__.py Project: johndouglas/openquake
def prepare_job(params):
    """
    Create a new OqJob and fill in the related OpParams entry.

    Returns the newly created job object.
    """
    session = get_db_session("reslt", "writer")

    # TODO specify the owner as a command line parameter
    owner = session.query(OqUser).filter(OqUser.user_name == 'openquake').one()
    oqp = OqParams(upload=None)
    job = OqJob(owner=owner, path=None, oq_params=oqp,
                job_type=CALCULATION_MODE[params['CALCULATION_MODE']])

    # fill-in parameters
    oqp.job_type = job.job_type
    oqp.region_grid_spacing = float(params['REGION_GRID_SPACING'])
    oqp.component = ENUM_MAP[params['COMPONENT']]
    oqp.imt = ENUM_MAP[params['INTENSITY_MEASURE_TYPE']]
    oqp.truncation_type = ENUM_MAP[params['GMPE_TRUNCATION_TYPE']]
    oqp.truncation_level = float(params['TRUNCATION_LEVEL'])
    oqp.reference_vs30_value = float(params['REFERENCE_VS30_VALUE'])

    if oqp.imt == 'sa':
        oqp.period = float(params.get('PERIOD', 0.0))

    if oqp.job_type != 'classical':
        oqp.gm_correlated = (
            params['GROUND_MOTION_CORRELATION'].lower() != 'false')
    else:
        oqp.imls = [float(v) for v in
                        params['INTENSITY_MEASURE_LEVELS'].split(",")]
        oqp.poes = [float(v) for v in
                        params['POES_HAZARD_MAPS'].split(" ")]

    if oqp.job_type != 'deterministic':
        oqp.investigation_time = float(params.get('INVESTIGATION_TIME', 0.0))
        oqp.min_magnitude = float(params.get('MINIMUM_MAGNITUDE', 0.0))
        oqp.realizations = int(params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

    if oqp.job_type == 'event_based':
        oqp.histories = int(params['NUMBER_OF_SEISMICITY_HISTORIES'])

    # config lat/lon -> postgis -> lon/lat
    coords = [float(v) for v in
                  params['REGION_VERTEX'].split(",")]
    vertices = ["%f %f" % (coords[i + 1], coords[i])
                    for i in xrange(0, len(coords), 2)]
    region = "SRID=4326;POLYGON((%s, %s))" % (", ".join(vertices), vertices[0])
    oqp.region = ga.WKTSpatialElement(region)

    session.add(oqp)
    session.add(job)
    session.commit()

    return job
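The lat/lon swap in the region handling above is easiest to see in isolation; this standalone sketch replays the same two lines on a hypothetical REGION_VERTEX value:

    # Config order is "lat, lon, ..."; PostGIS WKT wants "lon lat" pairs.
    coords = [float(v) for v in "37.2, -81.3, 38.04, -80.63".split(",")]
    vertices = ["%f %f" % (coords[i + 1], coords[i])
                for i in xrange(0, len(coords), 2)]
    # vertices == ['-81.300000 37.200000', '-80.630000 38.040000']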
Code example #9
File: job_unittest.py Project: johndouglas/openquake
    def test_get_status_from_db(self):
        self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')

        session = get_db_session("reslt", "writer")
        session.query(OqJob).update({'status': 'failed'})
        session.commit()
        self.assertEqual(Job.get_status_from_db(self.job.job_id), 'failed')
        session.query(OqJob).update({'status': 'running'})
        session.commit()
        self.assertEqual(Job.get_status_from_db(self.job.job_id), 'running')
Code example #10
File: job_unittest.py Project: johndouglas/openquake
    def test_set_status(self):
        self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')

        session = get_db_session("reslt", "writer")

        status = 'running'
        self.job.set_status(status)

        job = session.query(OqJob).filter(OqJob.id == self.job.job_id).one()

        self.assertEqual(status, job.status)
Code example #11
File: job_unittest.py Project: johndouglas/openquake
def _toCoordList(polygon):
    session = get_db_session("reslt", "writer")

    pts = []

    # postgis -> lon/lat -> config lat/lon, skip the closing point
    for c in polygon.coords(session)[0][:-1]:
        pts.append("%.2f" % c[1])
        pts.append("%.2f" % c[0])

    return ", ".join(pts)
Code example #12
File: __init__.py Project: johndouglas/openquake
    def set_status(self, status):
        """
        Set the status of the database record belonging to this job.

        :param status: one of 'pending', 'running', 'succeeded', 'failed'
        :type status: string
        """

        session = get_db_session("reslt", "writer")
        db_job = self.get_db_job(session)
        db_job.status = status
        session.add(db_job)
        session.commit()
Code example #13
    def test_get_db_session(self):
        """
        SessionCache.get() is called with the appropriate environment
        variables.
        """
        for (usr_var, (schema, role), (user, password)) in self.test_data:
            os.environ[usr_var] = "usr12"
            os.environ[usr_var + "_PWD"] = "pwd12"

            session = get_db_session(schema, role)
            self.assertTrue(session is self.expected_session)
            (actual_user, actual_passwd), _ = self.mock_method.call_args
            self.assertEqual("usr12", actual_user)
            self.assertEqual("pwd12", actual_passwd)
Code example #14
    def test_serialize_small(self):
        data = LOSS_MAP_DATA(['a%d' % i for i in range(5)], 20, 4)

        self.job = self.setup_classic_job()
        session = get_db_session("hzrdr", "writer")
        output_path = self.generate_output_path(self.job)

        for i in xrange(0, 10):
            lmw = LossMapDBWriter(session, output_path + str(i), self.job.id)

            # Call the function under test.
            lmw.serialize(data)

        session.commit()
Code example #15
    def test_serialize_small(self):
        data = LOSS_CURVE_DATA(20, 4)

        self.job = self.setup_classic_job()
        session = get_db_session("hzrdr", "writer")
        output_path = self.generate_output_path(self.job)

        for i in xrange(0, 20):
            lcw = LossCurveDBWriter(session, output_path + str(i), self.job.id)

            # Call the function under test.
            lcw.serialize(data)

        session.commit()
Code example #16
    def test_serialize_small(self):
        data = GMF_DATA(20, 4)

        self.job = self.setup_classic_job()
        session = get_db_session("hzrdr", "writer")
        output_path = self.generate_output_path(self.job)

        for i in xrange(0, 10):
            gmfw = GMFDBWriter(session, output_path + str(i), self.job.id)

            # Call the function under test.
            gmfw.serialize(data)

        session.commit()
Code example #17
    def test_serialize_small(self):
        data = HAZARD_CURVE_DATA(['1_1', '1_2', '2_2', '2'], 20, 4)

        self.job = self.setup_classic_job()
        session = get_db_session("hzrdr", "writer")
        output_path = self.generate_output_path(self.job)

        for i in xrange(0, 10):
            hcw = HazardCurveDBWriter(session, output_path + str(i),
                                      self.job.id)

            # Call the function under test.
            hcw.serialize(data)

        session.commit()
Code example #18
    def test_get_db_session_with_no_env(self):
        """
        The default user/passwords will be used.
        """
        for (usr_var, (schema, role), (user, password)) in self.test_data:
            if os.environ.get(usr_var) is not None:
                del os.environ[usr_var]
            pwd_var = usr_var + "_PWD"
            if os.environ.get(pwd_var) is not None:
                del os.environ[pwd_var]
            session = get_db_session(schema, role)
            self.assertTrue(session is self.expected_session)
            (actual_user, actual_password), _ = self.mock_method.call_args
            self.assertEqual(user, actual_user)
            self.assertEqual(password, actual_password)
Code example #19
File: helpers.py Project: johndouglas/openquake
    def setup_upload(self, dbkey=None):
        """Create an upload with associated inputs.

        :param integer dbkey: if set, use the upload record with the given
            db key.
        :returns: a :py:class:`db.alchemy.models.Upload` instance
        """
        session = get_db_session("uiapi", "writer")
        if dbkey:
            upload = session.query(Upload).filter(Upload.id == dbkey).one()
            return upload

        user = session.query(OqUser).filter(
            OqUser.user_name == "openquake").one()
        upload = Upload(owner=user, path=tempfile.mkdtemp())
        session.add(upload)
        session.commit()
        return upload
Code example #20
    def test_deserialize_small(self):
        data = HAZARD_CURVE_DATA(['1_1', '1_2', '2_2', '2'], 20, 4)

        self.job = self.setup_classic_job()
        session = get_db_session("reslt", "writer")
        output_path = self.generate_output_path(self.job)

        hcw = HazardCurveDBWriter(session, output_path, self.job.id)
        hcw.serialize(data)

        session.commit()

        # deserialize
        hcr = HazardCurveDBReader(session)

        for i in xrange(0, 10):
            # Call the function under test.
            hcr.deserialize(hcw.output.id)
Code example #21
File: helpers.py Project: johndouglas/openquake
    def teardown_upload(self, upload, filesystem_only=True):
        """
        Tear down the file system (and potentially db) artefacts for the
        given upload.

        :param upload: the :py:class:`db.alchemy.models.Upload` instance
            in question
        :param bool filesystem_only: if set, the upload/input database records
            will be left intact. This saves time; the test db will be
            dropped/recreated prior to the next db test suite run anyway.
        """
        # This is like "rm -rf path"
        shutil.rmtree(upload.path, ignore_errors=True)
        if filesystem_only:
            return
        session = get_db_session("uiapi", "writer")
        session.delete(upload)
        session.commit()
Code example #22
File: risk.py Project: johndouglas/openquake
def create_loss_curve_writer(job_id, serialize_to, nrml_path, curve_mode):
    """Create a loss curve writer observing the settings in the config file.

    If no writer is available for the given curve_mode and settings, returns
    None.

    :param job_id: the id of the job the curve belongs to.
    :type job_id: int
    :param serialize_to: where to serialize
    :type serialize_to: list of strings. Permitted values: 'db', 'xml'.
    :param str nrml_path: the full path of the XML/NRML representation of the
        hazard map.
    :param str curve_mode: one of 'loss', 'loss_ratio'
    :returns: None or an instance of
        :py:class:`output.risk.LossCurveXMLWriter`,
        :py:class:`output.risk.LossCurveDBWriter`,
        :py:class:`output.risk.LossRatioCurveXMLWriter`
    """

    assert curve_mode in ('loss', 'loss_ratio')

    writers = []

    if 'db' in serialize_to:
        assert job_id, "No job_id supplied"
        job_id = int(job_id)

        if curve_mode == 'loss':
            writers.append(LossCurveDBWriter(get_db_session("reslt", "writer"),
                                             nrml_path,
                                             job_id))
        elif curve_mode == 'loss_ratio':
            # We are not interested in storing loss ratios in the db
            pass

    if 'xml' in serialize_to:
        if curve_mode == 'loss':
            writer_class = LossCurveXMLWriter
        elif curve_mode == 'loss_ratio':
            writer_class = LossRatioCurveXMLWriter

        writers.append(writer_class(nrml_path))

    return writer.compose_writers(writers)
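A minimal usage sketch (job id and path are placeholders): with curve_mode 'loss' and both serialization targets, the composed writer wraps a LossCurveDBWriter and a LossCurveXMLWriter:

    writer = create_loss_curve_writer(
        42, ['db', 'xml'], '/tmp/loss-curves.xml', 'loss')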
Code example #23
    def _get_db_gmf(self, gmf_id):
        """Returns a field for the given GMF"""
        session = get_db_session("reslt", "reader")
        grid = self.region.grid
        field = zeros((grid.rows, grid.columns))

        sites = session.query(
                sqlfunc.ST_X(models.GMFData.location),
                sqlfunc.ST_Y(models.GMFData.location),
                models.GMFData.ground_motion) \
            .filter(models.GMFData.output_id == gmf_id)

        for x, y, value in sites:
            site = shapes.Site(x, y)
            grid_point = grid.point_at(site)

            field[grid_point.row][grid_point.column] = value

        return shapes.Field(field)
Code example #24
    def _get_db_curve(self, site):
        """Read hazard curve data from the DB"""
        session = get_db_session("reslt", "reader")

        iml_query = session.query(models.OqParams.imls) \
            .join(models.OqJob) \
            .filter(models.OqJob.id == self.job_id)
        curve_query = session.query(models.HazardCurveData.poes) \
            .join(models.HazardCurve) \
            .join(models.Output) \
            .filter(models.Output.oq_job_id == self.job_id) \
            .filter(models.HazardCurve.statistic_type == 'mean') \
            .filter(sqlfunc.ST_GeoHash(models.HazardCurveData.location, 12)
                        == geohash.encode(site.latitude, site.longitude,
                                          precision=12))

        hc = curve_query.one()
        pms = iml_query.one()

        return Curve(zip(pms.imls, hc.poes))
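The site match above compares a 12-character geohash computed in the database (ST_GeoHash) with one computed in Python; a sketch of the Python side, using the same geohash.encode call on a hypothetical site:

    import geohash

    # Both sides truncate to 12 characters, so equal strings mean
    # "same location" to within sub-metre precision.
    key = geohash.encode(37.2, -81.3, precision=12)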
Code example #25
    def test_csv_to_db_loader_end_to_end(self):
        """
            * Serializes the csv into the database
            * Queries the database for the data just inserted
            * Verifies the data against the csv
            * Deletes the inserted records from the database
        """

        engine = db_utils.get_db_session("eqcat", "writer").connection().engine

        csv_loader = db_loader.CsvModelLoader(self.csv_path, engine, 'eqcat')
        csv_loader.serialize()
        db_rows = self._retrieve_db_data(csv_loader.soup)

        # rewind the file
        csv_loader.csv_fd.seek(0)

        self._verify_db_data(csv_loader, db_rows)

        csv_loader.soup.commit()
Code example #26
File: helpers.py Project: johndouglas/openquake
    def teardown_output(self, output, teardown_job=True, filesystem_only=True):
        """
        Tear down the file system (and potentially db) artefacts for the
        given output.

        :param output: the :py:class:`db.alchemy.models.Output` instance
            in question
        :param bool teardown_job: if set, the associated job and its related
            artefacts will be torn down as well.
        :param bool filesystem_only: if set, the various database records will
            be left intact. This saves time; the test db will be
            dropped/recreated prior to the next db test suite run anyway.
        """
        job = output.oq_job
        if not filesystem_only:
            session = get_db_session("uiapi", "writer")
            session.delete(output)
            session.commit()
        if teardown_job:
            self.teardown_job(job, filesystem_only=filesystem_only)
Code example #27
    def _serialize_test_helper(self, test_file, expected_tables):
        engine = db_utils.get_db_session("hzrdi", "writer").connection().engine
        java.jvm().java.lang.System.setProperty("openquake.nrml.schema",
                                                xml.nrml_schema_file())
        src_loader = db_loader.SourceModelLoader(test_file, engine)

        results = src_loader.serialize()

        # we should get a 3-item list of results
        self.assertEqual(3, len(results))

        # We expect there to have been 3 inserts.
        # The results are a list of dicts with a single key.
        # The key is the table name (including table space);
        # the value is the id (as an int) of the new record.

        # First, check that the results includes the 3 tables we expect:
        result_tables = [x.keys()[0] for x in results]

        self.assertEqual(expected_tables, result_tables)

        # Everything appears to be fine, but let's query the database to make
        # sure the expected records are there.
        # At this point, we're not going to check every single value; we just
        # want to make sure the records made it into the database.
        tables = src_loader.meta.tables

        # list of tuples of (table name, id)
        table_id_pairs = [x.items()[0] for x in results]

        for table_name, record_id in table_id_pairs:
            table = tables[table_name]

            # run a query against the table object to get a ResultProxy
            result_proxy = table.select(table.c.id == record_id).execute()

            # there should be 1 record here
            self.assertEqual(1, result_proxy.rowcount)

        # clean up db resources
        src_loader.close()
Code example #28
File: helpers.py Project: johndouglas/openquake
    def teardown_job(self, job, filesystem_only=True):
        """
        Tear down the file system (and potentially db) artefacts for the
        given job.

        :param job: the :py:class:`db.alchemy.models.OqJob` instance
            in question
        :param bool filesystem_only: if set, the oq_job/oq_param/upload/input
            database records will be left intact. This saves time; the test
            db will be dropped/recreated prior to the next db test suite run
            anyway.
        """
        oqp = job.oq_params
        if oqp.upload is not None:
            self.teardown_upload(oqp.upload, filesystem_only=filesystem_only)
        if filesystem_only:
            return
        session = get_db_session("uiapi", "writer")
        session.delete(job)
        session.delete(oqp)
        session.commit()
Code example #29
File: helpers.py Project: johndouglas/openquake
    def setup_output(self, job_to_use=None, output_type="hazard_map",
                     db_backed=True):
        """Create an output object of the given type.

        :param job_to_use: if set, use the passed
            :py:class:`db.alchemy.models.OqJob` instance instead of
            creating a new one.
        :param str output_type: map type, one of "hazard_map", "loss_map"
        :param bool db_backed: initialize the ``db_backed`` property of the
            newly created :py:class:`db.alchemy.models.Output` instance with
            this value.
        :returns: a :py:class:`db.alchemy.models.Output` instance
        """
        job = job_to_use if job_to_use else self.setup_classic_job()
        output = Output(owner=job.owner, oq_job=job, output_type=output_type,
                        db_backed=db_backed)
        output.path = self.generate_output_path(job, output_type)
        output.display_name = os.path.basename(output.path)
        session = get_db_session("uiapi", "writer")
        session.add(output)
        session.commit()
        return output
Code example #30
File: helpers.py Project: johndouglas/openquake
    def setup_classic_job(self, create_job_path=True, upload_id=None):
        """Create a classic job with associated upload and inputs.

        :param integer upload_id: if set, use the upload record with the
            given db key.
        :param bool create_job_path: if set, the path for the job will be
            created and captured in the job record.
        :returns: a :py:class:`db.alchemy.models.OqJob` instance
        """
        session = get_db_session("uiapi", "writer")
        upload = self.setup_upload(upload_id)
        oqp = OqParams()
        oqp.job_type = "classical"
        oqp.upload = upload
        oqp.region_grid_spacing = 0.01
        oqp.min_magnitude = 5.0
        oqp.investigation_time = 50.0
        oqp.component = "gmroti50"
        oqp.imt = "pga"
        oqp.truncation_type = "twosided"
        oqp.truncation_level = 3
        oqp.reference_vs30_value = 760
        oqp.imls = self.IMLS
        oqp.poes = [0.01, 0.10]
        oqp.realizations = 1
        oqp.region = (
            "POLYGON((-81.3 37.2, -80.63 38.04, -80.02 37.49, -81.3 37.2))")
        session.add(oqp)
        job = OqJob(oq_params=oqp, owner=upload.owner, job_type="classical")
        session.add(job)
        session.commit()
        if create_job_path:
            job.path = os.path.join(upload.path, str(job.id))
            session.add(job)
            session.commit()
            os.mkdir(job.path)
            os.chmod(job.path, 0777)
        return job