Example #1
    def test_create_job_explicit_log_level(self):
        # By default, a job is created with a log level of 'progress'
        # (just to show calculation progress).
        # In this test, we'll specify 'debug' as the log level.
        job = engine.create_job(log_level='debug')

        self.assertEqual('debug', job.log_level)
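
For reference, a minimal sketch of the two call styles this test contrasts (a hedged example assuming openquake.engine.engine is imported as engine):

# Default: jobs log at the 'progress' level unless told otherwise.
default_job = engine.create_job()
assert default_job.log_level == 'progress'

# Explicit: request verbose 'debug' logging instead.
verbose_job = engine.create_job(log_level='debug')
assert verbose_job.log_level == 'debug'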
Example #2
def import_gmf_scenario(fileobj):
    """
    Parse the file with the GMF fields and import it into the table
    gmf_scenario. It also creates a new output record, unrelated to a job.
    Works both with XML files and tab-separated files with format
    (imt, gmvs, location).
    :returns: the generated :class:`openquake.engine.db.models.Output` object.
    """
    t0 = time.time()
    fname = fileobj.name

    job = engine.create_job()

    ses_coll, gmf_coll = create_ses_gmf(job, fname)
    imts, tags, rows = read_data(fileobj)
    import_rows(job, ses_coll, gmf_coll, tags, rows)
    job.save_params(
        dict(
            base_path=os.path.dirname(fname),
            description='Scenario importer, file %s' % os.path.basename(fname),
            calculation_mode='scenario',
            hazard_imtls=dict.fromkeys(imts),
            inputs={},
            number_of_ground_motion_fields=len(rows) // len(imts),
            maximum_distance=1000.,
            ))

    job.duration = time.time() - t0
    job.status = 'complete'
    job.save()
    return gmf_coll.output
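
A hypothetical invocation of this importer (the path is a placeholder; the function only needs an open file object whose .name attribute points at an XML or tab-separated GMF file):

# Hypothetical usage; 'gmf_scenario.xml' is an illustrative path.
with open('gmf_scenario.xml') as fileobj:
    output = import_gmf_scenario(fileobj)
    # output.oq_job is assumed to reference the job created by the importer
    print('imported %s for job %s' % (output, output.oq_job))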
Example #3
    def test_engine_performance_monitor_no_task(self):
        job = engine.create_job()
        operation = str(uuid.uuid1())
        with EnginePerformanceMonitor(operation, job.id) as pmon:
            pass
        self._check_result(pmon)
        pmon.flush()
        records = Performance.objects.filter(operation=operation)
        self.assertEqual(len(records), 1)
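
Outside the test suite the same context-manager pattern can time any block of engine code; a hedged sketch (do_work is a hypothetical placeholder for the code being measured):

job = engine.create_job()
with EnginePerformanceMonitor('importing data', job.id) as pmon:
    do_work()  # hypothetical workload being timed
# persist the timing record to the Performance table
pmon.flush()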
Example #4
    def test_engine_performance_monitor(self):
        job = engine.create_job()
        # mock.Mock() does not auto-create dunder attributes, so the
        # __name__ the monitor expects has to be set by hand
        mock_task = mock.Mock()
        mock_task.__name__ = 'mock_task'
        mock_task.request.id = task_id = str(uuid.uuid1())
        with EnginePerformanceMonitor('test', job.id, mock_task) as pmon:
            pass
        self._check_result(pmon)
        # check that exactly one record was stored in the db
        pmon.flush()
        self.assertEqual(len(Performance.objects.filter(task_id=task_id)), 1)
Example #5
    def test_create_job_specified_user(self):
        user_name = helpers.random_string()
        job = engine.create_job(user_name=user_name)

        self.assertEqual(user_name, job.user_name)
        self.assertEqual('pre_executing', job.status)
        self.assertEqual('progress', job.log_level)

        try:
            models.OqJob.objects.get(id=job.id)
        except exceptions.ObjectDoesNotExist:
            self.fail('Job was not found in the database')
Example #6
    def test_create_job_default_user(self):
        job = engine.create_job()

        self.assertEqual('openquake', job.user_name)
        self.assertEqual('pre_executing', job.status)
        self.assertEqual('progress', job.log_level)

        # Check to make sure the job is in the database.
        try:
            models.OqJob.objects.get(id=job.id)
        except exceptions.ObjectDoesNotExist:
            self.fail('Job was not found in the database')
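
Taken together with the previous example, this shows the defaults that create_job fills in; a compact hedged recap (same assumptions as the sketch under Example #1):

job = engine.create_job()
# defaults exercised by the two tests above
assert job.user_name == 'openquake'
assert job.status == 'pre_executing'
assert job.log_level == 'progress'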
Example #7
def import_hazard_curves(fileobj):
    """
    Parse the file with the hazard curves and import it into the tables
    hazard_curve and hazard_curve_data. It also creates a new output record,
    unrelated to a job.

    :param fileobj:
        a file-like object associated to an XML file
    :returns:
        the generated :class:`openquake.engine.db.models.Output` object.
    """
    fname = fileobj.name
    hazcurves = nrml.read(fileobj).hazardCurves
    imt = imt_str = hazcurves['IMT']
    if imt == 'SA':
        imt_str += '(%s)' % hazcurves['saPeriod']
    imls = ~hazcurves.IMLs
    hc_nodes = hazcurves[1:]

    curs = connections['job_init'].cursor().cursor  # DB API cursor
    job = engine.create_job()
    job.save_params(dict(
        base_path=os.path.dirname(fname),
        intensity_measure_types_and_levels={imt_str: imls},
        description='HazardCurve importer, file %s' % os.path.basename(fname),
        calculation_mode='classical'))

    out = models.Output.objects.create(
        display_name='Imported from %r' % fname, output_type='hazard_curve',
        oq_job=job)

    haz_curve = models.HazardCurve.objects.create(
        investigation_time=hazcurves['investigationTime'],
        imt=imt,
        imls=imls,
        quantile=hazcurves.attrib.get('quantileValue'),
        statistics=hazcurves.attrib.get('statistics'),
        sa_damping=hazcurves.attrib.get('saDamping'),
        sa_period=hazcurves.attrib.get('saPeriod'),
        output=out)
    hazard_curve_id = str(haz_curve.id)

    # convert the XML into a tab-separated StringIO
    f = StringIO()
    for node in hc_nodes:
        x, y = ~node.Point.pos
        poes = ~node.poEs
        poes = '{%s}' % str(poes)[1:-1]
        print >> f, '\t'.join([hazard_curve_id, poes,
                               'SRID=4326;POINT(%s %s)' % (x, y)])
    f.reset()
    # import the file-like object with a COPY FROM
    try:
        curs.copy_expert(
            'copy hzrdr.hazard_curve_data (hazard_curve_id, poes, location) '
            'from stdin', f)
    except:
        # on any failure, roll back the partial COPY before re-raising
        curs.connection.rollback()
        raise
    else:
        curs.connection.commit()
    finally:
        f.close()
    job.save()
    return out
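
The bulk load above goes through psycopg2's copy_expert, which streams a file-like object into PostgreSQL's COPY; a minimal standalone sketch of the same technique (connection DSN, table, and data are illustrative):

import io
import psycopg2

conn = psycopg2.connect('dbname=test')  # placeholder DSN
buf = io.StringIO(u'1\tfoo\n2\tbar\n')  # tab-separated rows, as in the importer
curs = conn.cursor()
try:
    curs.copy_expert('COPY my_table (id, name) FROM STDIN', buf)
except Exception:
    conn.rollback()  # undo the partial COPY on failure
    raise
else:
    conn.commit()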