Exemplo n.º 1
0
def get_full_run(request):
    '''
        Performs a full run of the current active Model, turning off any
        response options (Skimmer, Burn, ChemicalDispersion) unless the
        request body's 'response_on' flag asks to keep them enabled.
        Returns the final step's output, augmented with aggregated
        weathering-uncertainty lows/highs when uncertain runs exist.

        :raises HTTPUnprocessableEntity: if the model run fails.
        :raises HTTPPreconditionFailed: if there is no active model.
    '''
    log_prefix = 'req({0}): get_full_run():'.format(id(request))
    log.info('>>' + log_prefix)

    response_on = request.json_body['response_on']

    active_model = get_active_model(request)
    if active_model:
        session_lock = acquire_session_lock(request)
        log.info('  session lock acquired (sess:{}, thr_id: {})'
                 .format(id(session_lock), current_thread().ident))

        # Initialized before the try so the finally block can never hit a
        # NameError, even if the flag-snapshot comprehension itself fails.
        weatherer_enabled_flags = []

        try:
            # Snapshot each weatherer's enabled state so it can be
            # restored in the finally block after the run.
            weatherer_enabled_flags = [w.on for w in active_model.weatherers]

            if response_on is False:
                for w in active_model.weatherers:
                    if isinstance(w, (Skimmer, Burn, ChemicalDispersion)):
                        w.on = False

            active_model.rewind()

            drop_uncertain_models(request)

            if active_model.has_weathering_uncertainty:
                log.info('Model has weathering uncertainty')
                set_uncertain_models(request)
            else:
                log.info('Model does not have weathering uncertainty')

            begin = time.time()

            # Pre-initialize so a model that yields zero steps cannot
            # raise a NameError when these are used after the loop.
            output = None
            steps = None

            for step in active_model:
                output = step
                steps = get_uncertain_steps(request)

            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}

                # Gather every uncertain run's value for each output key...
                for step_output in steps:
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # ...then reduce to the per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': low,
                               'high': high}
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin
            elif output is not None and 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': None,
                               'high': None}
                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin

            active_model.rewind()
        except Exception:
            raise cors_exception(request, HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            # Restore the weatherers' original enabled flags.
            for a, w in zip(weatherer_enabled_flags, active_model.weatherers):
                w.on = a
            session_lock.release()
            log.info('  session lock released (sess:{}, thr_id: {})'
                     .format(id(session_lock), current_thread().ident))

        log.info('<<' + log_prefix)
        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed)
Exemplo n.º 2
0
def get_step(request):
    '''
        Runs the active Model forward one step and returns that step's
        output, augmented with aggregated weathering-uncertainty results
        when uncertain runs exist.

        :raises HTTPNotFound: when the model run is already complete
                              (StopIteration from Model.step()).
        :raises HTTPUnprocessableEntity: on any other model failure.
        :raises HTTPPreconditionFailed: when there is no active model
                                        (session timed out).
    '''
    log_prefix = 'req({0}): get_step():'.format(id(request))
    log.info('>>' + log_prefix)

    active_model = get_active_model(request)
    if active_model:
        # generate the next step in the sequence.
        session_lock = acquire_session_lock(request)
        log.info('  {} session lock acquired (sess:{}, thr_id: {})'
                 .format(log_prefix, id(session_lock), current_thread().ident))

        try:
            if active_model.current_time_step == -1:
                # our first step, establish uncertain models
                drop_uncertain_models(request)

                log.info('\thas_weathering_uncertainty {0}'.
                         format(active_model.has_weathering_uncertainty))
                if active_model.has_weathering_uncertainty:
                    set_uncertain_models(request)
                else:
                    log.info('Model does not have weathering uncertainty')

            begin = time.time()
            output = active_model.step()

            begin_uncertain = time.time()
            steps = get_uncertain_steps(request)
            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}
                full_output = {}

                for idx, step_output in enumerate(steps):
                    # step_output could contain an exception from one
                    # of our uncertainty worker processes.  If so, then
                    # we should propagate the exception with its original
                    # context.
                    if (isinstance(step_output, tuple) and
                            len(step_output) >= 3 and
                            isinstance(step_output[1], Exception)):
                        raise step_output[1].with_traceback(step_output[2])

                    # Gather every uncertain run's value for each output key.
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # Reduce the gathered uncertain values to per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': low,
                               'high': high}
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin
            elif 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': None,
                               'high': None}
                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin

        except StopIteration:
            # The model has no more steps; treat as 404 and clean up the
            # uncertain worker models.
            log.info('  ' + log_prefix + 'stop iteration exception...')
            drop_uncertain_models(request)
            raise cors_exception(request, HTTPNotFound)
        except Exception:
            log.info('  ' + log_prefix + 'unknown exception...')
            raise cors_exception(request, HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            session_lock.release()
            log.info('  {} session lock released (sess:{}, thr_id: {})'
                     .format(log_prefix, id(session_lock),
                             current_thread().ident))

        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed,
                             explanation=(b'Your session timed out - the model is no longer active'))
Exemplo n.º 3
0
def get_step(request):
    '''
        Runs the active Model forward one step and returns that step's
        output, augmented with aggregated weathering-uncertainty results
        when uncertain runs exist.

        :raises HTTPNotFound: when the model run is already complete
                              (StopIteration from Model.step()).
        :raises HTTPUnprocessableEntity: on any other model failure.
        :raises HTTPPreconditionFailed: when there is no active model.
    '''
    log_prefix = 'req({0}): get_step():'.format(id(request))
    log.info('>>' + log_prefix)

    active_model = get_active_model(request)
    if active_model:
        # generate the next step in the sequence.
        gnome_sema = request.registry.settings['py_gnome_semaphore']
        gnome_sema.acquire()
        log.info('  ' + log_prefix + 'semaphore acquired...')

        try:
            if active_model.current_time_step == -1:
                # our first step, establish uncertain models
                drop_uncertain_models(request)

                log.info('\thas_weathering_uncertainty {0}'.format(
                    active_model.has_weathering_uncertainty))
                if active_model.has_weathering_uncertainty:
                    set_uncertain_models(request)
                else:
                    log.info('Model does not have weathering uncertainty')

            begin = time.time()
            output = active_model.step()

            begin_uncertain = time.time()
            steps = get_uncertain_steps(request)
            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}

                # Gather every uncertain run's value for each output key.
                # (.items() replaces the Python-2-only .iteritems().)
                for step_output in steps:
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # Reduce the gathered uncertain values to per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {
                    'time_stamp': nominal['time_stamp'],
                    'nominal': nominal,
                    'low': low,
                    'high': high
                }
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin
            elif 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {
                    'time_stamp': nominal['time_stamp'],
                    'nominal': nominal,
                    'low': None,
                    'high': None
                }
                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin

        except StopIteration:
            # The model has no more steps; treat as 404 and clean up the
            # uncertain worker models.
            log.info('  ' + log_prefix + 'stop iteration exception...')
            drop_uncertain_models(request)
            raise cors_exception(request, HTTPNotFound)
        except Exception:
            # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # are not swallowed into an HTTP error.
            log.info('  ' + log_prefix + 'unknown exception...')
            raise cors_exception(request,
                                 HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            gnome_sema.release()
            log.info('  ' + log_prefix + 'semaphore released...')

        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed)
Exemplo n.º 4
0
def get_full_run(request):
    '''
        Performs a full run of the current active Model, turning off any
        response options (Skimmer, Burn, ChemicalDispersion).
        Returns the final step's output, augmented with aggregated
        weathering-uncertainty lows/highs when uncertain runs exist.

        :raises HTTPUnprocessableEntity: if the model run fails.
        :raises HTTPPreconditionFailed: if there is no active model.
    '''
    active_model = get_active_model(request)
    if active_model:
        gnome_sema = request.registry.settings['py_gnome_semaphore']
        gnome_sema.acquire()

        # Initialized before the try so the finally block can never hit a
        # NameError, even if the flag-snapshot comprehension itself fails.
        weatherer_enabled_flags = []

        try:
            # Snapshot each weatherer's enabled state so it can be
            # restored in the finally block after the run.
            weatherer_enabled_flags = [w.on for w in active_model.weatherers]

            for w in active_model.weatherers:
                if isinstance(w, (Skimmer, Burn, ChemicalDispersion)):
                    w.on = False

            active_model.rewind()

            drop_uncertain_models(request)

            if active_model.has_weathering_uncertainty:
                log.info('Model has weathering uncertainty')
                set_uncertain_models(request)
            else:
                log.info('Model does not have weathering uncertainty')

            begin = time.time()

            # Pre-initialize so a model that yields zero steps cannot
            # raise a NameError when these are used after the loop.
            output = None
            steps = None

            for step in active_model:
                output = step
                steps = get_uncertain_steps(request)

            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}

                # Gather every uncertain run's value for each output key.
                # (.items() replaces the Python-2-only .iteritems().)
                for step_output in steps:
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # Reduce the gathered uncertain values to per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {
                    'time_stamp': nominal['time_stamp'],
                    'nominal': nominal,
                    'low': low,
                    'high': high
                }
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin
            elif output is not None and 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {
                    'time_stamp': nominal['time_stamp'],
                    'nominal': nominal,
                    'low': None,
                    'high': None
                }
                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin
        except Exception:
            # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # are not swallowed into an HTTP error.
            raise cors_exception(request,
                                 HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            # Restore the weatherers' original enabled flags.
            for a, w in zip(weatherer_enabled_flags, active_model.weatherers):
                w.on = a
            gnome_sema.release()

        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed)
Exemplo n.º 5
0
def execute_async_model(active_model=None,
                        socket_namespace=None,
                        sockid=None,
                        request=None,
                        send_output=True):
    '''
    Meant to run in a greenlet. This function should take an active model
    and run it, writing each step's output to the socket.

    :param active_model: the model to step; stepping stops on StopIteration.
    :param socket_namespace: socket.io namespace used to emit step results
                             and lifecycle events ('prepared', 'step',
                             'timeout', 'killed', 'runtimeError', 'complete').
    :param sockid: the socket session id (room) to emit to.
    :param request: the originating request; used for logging and for the
                    uncertainty model helpers.
    :param send_output: when True, emit each step's full output; otherwise
                        emit only the running count of steps sent.
    '''
    print(request.session_hash)
    log = get_greenlet_logger(request)
    log_prefix = 'req{0}: execute_async_model()'.format(id(request))
    sock_session_copy = socket_namespace.get_session(
        sockid)  #use get_session to get a clone of the session
    try:
        wait_time = 16
        socket_namespace.emit('prepared', room=sockid)
        with socket_namespace.session(sockid) as sock_session:
            # Event.wait() returns True if the event was set within the
            # timeout, False otherwise.
            unlocked = sock_session['lock'].wait(wait_time)
            if not unlocked:
                socket_namespace.emit('timeout',
                                      'Model not started, timed out after '
                                      '{0} sec'.format(wait_time),
                                      room=sockid)
                socket_namespace.on_model_kill(sockid)

        log.info('model run triggered')
        while True:
            output = None
            try:
                if active_model.current_time_step == -1:
                    # our first step, establish uncertain models
                    drop_uncertain_models(request)

                    if active_model.has_weathering_uncertainty:
                        log.info('Model has weathering uncertainty')
                        set_uncertain_models(request)
                    else:
                        log.info('Model does not have '
                                 'weathering uncertainty')

                begin = time.time()

                output = active_model.step()

                begin_uncertain = time.time()
                steps = get_uncertain_steps(request)
                end = time.time()

                if steps and 'WeatheringOutput' in output:
                    nominal = output['WeatheringOutput']
                    aggregate = defaultdict(list)
                    low = {}
                    high = {}
                    full_output = {}

                    for idx, step_output in enumerate(steps):
                        # step_output could contain an exception from one
                        # of our uncertainty worker processes.  If so, then
                        # we should propagate the exception with its
                        # original context.
                        if (isinstance(step_output, tuple)
                                and len(step_output) >= 3
                                and isinstance(step_output[1], Exception)):
                            raise step_output[1].with_traceback(step_output[2])

                        # Gather every uncertain run's value per output key.
                        for k, v in step_output['WeatheringOutput'].items():
                            aggregate[k].append(v)

                    # Reduce the gathered values to per-key extremes.
                    for k, v in aggregate.items():
                        low[k] = min(v)
                        high[k] = max(v)

                    full_output = {
                        'time_stamp': nominal['time_stamp'],
                        'nominal': nominal,
                        'low': low,
                        'high': high
                    }

                    # Also expose each uncertain run's raw output by index.
                    for idx, step_output in enumerate(steps):
                        full_output[idx] = step_output['WeatheringOutput']

                    output['WeatheringOutput'] = full_output
                    output['uncertain_response_time'] = end - begin_uncertain
                    output['total_response_time'] = end - begin
                elif 'WeatheringOutput' in output:
                    # No uncertain runs: report nominal results only.
                    nominal = output['WeatheringOutput']
                    full_output = {
                        'time_stamp': nominal['time_stamp'],
                        'nominal': nominal,
                        'low': None,
                        'high': None
                    }

                    output['WeatheringOutput'] = full_output
                    output['uncertain_response_time'] = end - begin_uncertain
                    output['total_response_time'] = end - begin
            except StopIteration:
                # Normal termination: the model has produced its last step.
                log.info('  {} stop iteration exception...'.format(log_prefix))
                drop_uncertain_models(request)
                break
            except Exception:
                exc_type, exc_value, _exc_traceback = sys.exc_info()
                traceback.print_exc()
                if ('develop_mode' in list(request.registry.settings.keys())
                        and request.registry.settings['develop_mode'].lower()
                        == 'true'):
                    import pdb
                    pdb.post_mortem(sys.exc_info()[2])

                msg = ('  {}{}'.format(
                    log_prefix,
                    traceback.format_exception_only(exc_type, exc_value)))
                log.critical(msg)
                raise

            sock_session_copy['num_sent'] += 1
            log.debug(sock_session_copy['num_sent'])
            if output and send_output:
                socket_namespace.emit('step', output, room=sockid)
            else:
                socket_namespace.emit('step', sock_session_copy['num_sent'])

            if not socket_namespace.is_async:
                with socket_namespace.session(sockid) as sock_session:
                    sock_session['lock'].clear()
                    print('lock!')

            # kill greenlet after 100 minutes unless unlocked
            wait_time = 6000
            with socket_namespace.session(sockid) as sock_session:
                # BUGFIX: capture the boolean result of wait() instead of
                # binding the Event object itself, which is always truthy
                # and made the timeout branch below unreachable.
                unlocked = sock_session['lock'].wait(wait_time)
                print('lock!')
                if not unlocked:
                    socket_namespace.emit(
                        'timeout',
                        'Model run timed out after {0} sec'.format(wait_time),
                        room=sockid)
                    socket_namespace.on_model_kill(sockid)

            gevent.sleep(0.001)
    except GreenletExit:
        log.info('Greenlet exiting early')
        socket_namespace.emit('killed',
                              'Model run terminated early',
                              room=sockid)
        raise

    except Exception:
        exc_type, exc_value, _exc_traceback = sys.exc_info()
        traceback.print_exc()
        if ('develop_mode' in list(request.registry.settings.keys()) and
                request.registry.settings['develop_mode'].lower() == 'true'):
            import pdb
            pdb.post_mortem(sys.exc_info()[2])

        msg = ('  {}{}'.format(
            log_prefix, traceback.format_exception_only(exc_type, exc_value)))
        log.critical(msg)
        log.info('Greenlet terminated due to exception')

        json_exc = json_exception(2, True)
        socket_namespace.emit('runtimeError', json_exc['message'], room=sockid)
        raise

    finally:
        with socket_namespace.session(sockid) as sock_session:
            # Log any session properties that will be overwritten by the
            # save below, for debuggability.
            for k, v in sock_session.items():
                if sock_session_copy[k] != v:
                    log.info(
                        '{0} session property {1} changing from {2} to {3}'.
                        format(sockid, k, v, sock_session_copy[k]))
        socket_namespace.save_session(sockid, sock_session_copy)

    socket_namespace.emit('complete', 'Model run completed')
Exemplo n.º 6
0
def get_step(request):
    '''
        Runs the active Model forward one step and returns that step's
        output, augmented with aggregated weathering-uncertainty results
        when uncertain runs exist.

        :raises HTTPNotFound: when the model run is already complete
                              (StopIteration from Model.step()).
        :raises HTTPUnprocessableEntity: on any other model failure.
        :raises HTTPPreconditionFailed: when there is no active model.
    '''
    log_prefix = 'req({0}): get_step():'.format(id(request))
    log.info('>>' + log_prefix)

    active_model = get_active_model(request)
    if active_model:
        # generate the next step in the sequence.
        gnome_sema = request.registry.settings['py_gnome_semaphore']
        gnome_sema.acquire()
        log.info('  ' + log_prefix + 'semaphore acquired...')

        try:
            if active_model.current_time_step == -1:
                # our first step, establish uncertain models
                drop_uncertain_models(request)

                log.info('\thas_weathering_uncertainty {0}'.
                         format(active_model.has_weathering_uncertainty))
                if active_model.has_weathering_uncertainty:
                    set_uncertain_models(request)
                else:
                    log.info('Model does not have weathering uncertainty')

            begin = time.time()
            output = active_model.step()

            begin_uncertain = time.time()
            steps = get_uncertain_steps(request)
            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}

                # Gather every uncertain run's value for each output key.
                # (.items() replaces the Python-2-only .iteritems().)
                for step_output in steps:
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # Reduce the gathered uncertain values to per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': low,
                               'high': high}
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin
            elif 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': None,
                               'high': None}
                output['WeatheringOutput'] = full_output
                output['uncertain_response_time'] = end - begin_uncertain
                output['total_response_time'] = end - begin

        except StopIteration:
            # The model has no more steps; treat as 404 and clean up the
            # uncertain worker models.
            log.info('  ' + log_prefix + 'stop iteration exception...')
            drop_uncertain_models(request)
            raise cors_exception(request, HTTPNotFound)
        except Exception:
            # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # are not swallowed into an HTTP error.
            log.info('  ' + log_prefix + 'unknown exception...')
            raise cors_exception(request, HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            gnome_sema.release()
            log.info('  ' + log_prefix + 'semaphore released...')

        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed)
Exemplo n.º 7
0
def get_full_run(request):
    '''
        Performs a full run of the current active Model, turning off any
        response options (Skimmer, Burn, ChemicalDispersion).
        Returns the final step's output, augmented with aggregated
        weathering-uncertainty lows/highs when uncertain runs exist.

        :raises HTTPUnprocessableEntity: if the model run fails.
        :raises HTTPPreconditionFailed: if there is no active model.
    '''
    active_model = get_active_model(request)
    if active_model:
        gnome_sema = request.registry.settings['py_gnome_semaphore']
        gnome_sema.acquire()

        # Initialized before the try so the finally block can never hit a
        # NameError, even if the flag-snapshot comprehension itself fails.
        weatherer_enabled_flags = []

        try:
            # Snapshot each weatherer's enabled state so it can be
            # restored in the finally block after the run.
            weatherer_enabled_flags = [w.on for w in active_model.weatherers]

            for w in active_model.weatherers:
                if isinstance(w, (Skimmer, Burn, ChemicalDispersion)):
                    w.on = False

            active_model.rewind()

            drop_uncertain_models(request)

            if active_model.has_weathering_uncertainty:
                log.info('Model has weathering uncertainty')
                set_uncertain_models(request)
            else:
                log.info('Model does not have weathering uncertainty')

            begin = time.time()

            # Pre-initialize so a model that yields zero steps cannot
            # raise a NameError when these are used after the loop.
            output = None
            steps = None

            for step in active_model:
                output = step
                steps = get_uncertain_steps(request)

            end = time.time()

            if steps and 'WeatheringOutput' in output:
                nominal = output['WeatheringOutput']
                aggregate = defaultdict(list)
                low = {}
                high = {}

                # Gather every uncertain run's value for each output key.
                # (.items() replaces the Python-2-only .iteritems().)
                for step_output in steps:
                    for k, v in step_output['WeatheringOutput'].items():
                        aggregate[k].append(v)

                # Reduce the gathered uncertain values to per-key extremes.
                for k, v in aggregate.items():
                    low[k] = min(v)
                    high[k] = max(v)

                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': low,
                               'high': high}
                # Also expose each uncertain run's raw output, keyed by index.
                for idx, step_output in enumerate(steps):
                    full_output[idx] = step_output['WeatheringOutput']

                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin
            elif output is not None and 'WeatheringOutput' in output:
                # No uncertain runs: report nominal results only.
                nominal = output['WeatheringOutput']
                full_output = {'time_stamp': nominal['time_stamp'],
                               'nominal': nominal,
                               'low': None,
                               'high': None}
                output['WeatheringOutput'] = full_output
                output['total_response_time'] = end - begin
        except Exception:
            # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # are not swallowed into an HTTP error.
            raise cors_exception(request, HTTPUnprocessableEntity,
                                 with_stacktrace=True)
        finally:
            # Restore the weatherers' original enabled flags.
            for a, w in zip(weatherer_enabled_flags, active_model.weatherers):
                w.on = a
            gnome_sema.release()

        return output
    else:
        raise cors_exception(request, HTTPPreconditionFailed)