Example 1
    def fixup_old_data(cls, data):
        dm = data.models
        cls._init_models(
            dm,
            (
                'animation',
                'beamAnimation',
                'beamPreviewReport',
                'electronBeam',
                'fieldAnimation',
                'laserPreviewReport',
                'particleAnimation',
                'simulationGrid',
            ),
        )
        pkcollections.unchecked_del(
            dm.simulationGrid,
            'xMin',
            'xMax',
            'xCount',
            'zLambda',
        )
        if 'rmsRadius' in dm.electronBeam and dm.electronBeam.rmsRadius == 0:
            del dm.electronBeam['rmsRadius']
        cls._organize_example(data)
Example 2
def test_unchecked_del():
    from pykern.pkunit import pkeq
    from pykern import pkcollections

    n = {'a': 1, 'b': 2, 'c': 3}
    pkcollections.unchecked_del(n, 'a')
    pkeq({'b': 2, 'c': 3}, n)
    pkcollections.unchecked_del(n, 'a', 'b', 'c')
    pkeq({}, n)
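
The test pins down the contract: unchecked_del removes each named key when present and silently skips keys that are absent, unlike plain del, which raises KeyError. A minimal standalone sketch of that behavior (the real implementation lives in pykern.pkcollections; this version is only for illustration):

def unchecked_del_sketch(container, *keys):
    """Delete each of *keys from container, ignoring keys that are missing."""
    for k in keys:
        try:
            del container[k]
        except KeyError:
            pass

unchecked_del_sketch({'a': 1}, 'a', 'b')  # no KeyError for the absent 'b'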
Example 3
def _auth_hook_from_header(values):
    """Migrate from old cookie values

    Always sets _COOKIE_STATE, which is our sentinel.

    Args:
        values (dict): just parsed values
    Returns:
        dict: unmodified or migrated values
    """
    if values.get(_COOKIE_STATE):
        # normal case: we've seen a cookie at least once
        # check for cfg.methods changes
        m = values.get(_COOKIE_METHOD)
        if m and m not in valid_methods:
            # invalid method (changed config), reset state
            pkdlog(
                'possibly misconfigured server: invalid cookie_method={}, clearing values={}',
                m,
                values,
            )
            pkcollections.unchecked_del(
                values,
                _COOKIE_METHOD,
                _COOKIE_USER,
                _COOKIE_STATE,
            )
        return values
    u = values.get('sru') or values.get('uid')
    if not u:
        # normal case: new visitor, and no user/state; set logged out
        # and return all values
        values[_COOKIE_STATE] = _STATE_LOGGED_OUT
        return values
    # Migrate
    o = values.get('sros') or values.get('oauth_login_state')
    s = _STATE_COMPLETE_REGISTRATION
    if o is None or o in ('anonymous', 'a'):
        m = METHOD_GUEST
    elif o in ('logged_in', 'li', 'logged_out', 'lo'):
        m = 'github'
        if 'i' not in o:
            s = _STATE_LOGGED_OUT
    else:
        pkdlog('unknown cookie values, clearing, not migrating: {}', values)
        return {}
    # Upgrade cookie to current structure. Set the sentinel, too.
    values = {
        _COOKIE_USER: u,
        _COOKIE_METHOD: m,
        _COOKIE_STATE: s,
    }
    cookie.set_sentinel(values)
    pkdlog('migrated cookie={}', values)
    return values
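
A worked trace makes the migration branch concrete. Take a hypothetical parsed cookie (values invented for illustration) and follow the code above:

# Hypothetical input: values = {'sru': 'u123', 'sros': 'li'}
# _COOKIE_STATE is absent and there is a user key, so migration runs: u = 'u123'
# o = 'li' is in ('logged_in', 'li', 'logged_out', 'lo'), so m = 'github'
# 'i' is in 'li', so s remains _STATE_COMPLETE_REGISTRATION
# Returned (after set_sentinel):
#   {_COOKIE_USER: 'u123', _COOKIE_METHOD: 'github',
#    _COOKIE_STATE: _STATE_COMPLETE_REGISTRATION}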
Example 4
    def fixup_old_data(cls, data):
        dm = data.models
        dm.pksetdefault(optimizer=PKDict)
        dm.optimizer.pksetdefault(
            constraints=list,
            enabledFields=PKDict,
            fields=list,
        )
        cls._init_models(
            dm,
            (
                # simulationGrid must be first
                'simulationGrid',
                'anode',
                'egunCurrentAnimation',
                'fieldAnimation',
                'fieldCalcAnimation',
                'fieldCalculationAnimation',
                'fieldComparisonAnimation',
                'fieldComparisonReport',
                'fieldReport',
                'impactDensityAnimation',
                'optimizer',
                'optimizerAnimation',
                'optimizerStatus',
                'particle3d',
                'particleAnimation',
                'simulation',
            ),
            dynamic=lambda m: cls.__dynamic_defaults(data, m),
        )
        pkcollections.unchecked_del(dm.particle3d, 'joinEvery')
        #TODO(robnagler) is this a denormalization of conductors?
        s = cls.schema()
        for c in dm.get('conductorTypes', []):
            #TODO(robnagler) can a conductor type be none?
            if c is None:
                continue
            #TODO(robnagler) why is this not a bool?
            x = c.setdefault('isConductor', '1' if c.voltage > 0 else '0')
            c.pksetdefault(
                color=s.get('zeroVoltsColor' if x == '0' else 'nonZeroVoltsColor'),
            )
            #TODO(robnagler) how does this work? bc names are on schema, not conductor
            cls.update_model_defaults(c, c.get('type', 'box'))
        for c in dm.conductors:
            cls.update_model_defaults(c, 'conductorPosition')
        cls._organize_example(data)
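
Note the callable defaults passed to pksetdefault near the top: as I understand pykern's PKDict, pksetdefault invokes a callable value to produce the default, so constraints=list yields a fresh list per model rather than one shared instance. A minimal sketch of that behavior, assuming pykern is installed:

from pykern.pkcollections import PKDict

m = PKDict()
m.pksetdefault(constraints=list, enabledFields=PKDict)
assert m.constraints == [] and isinstance(m.enabledFields, PKDict)
m.pksetdefault(constraints=lambda: ['x'])  # key already set; default is not applied
assert m.constraints == []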
Example 5
def fixup_old_data(data, force=False):
    """Upgrade data to latest schema and updates version.

    Args:
        data (dict): to be updated (destructively)
        force (bool): force validation

    Returns:
        dict: upgraded `data`
        bool: True if data changed
    """
    try:
        if not force and 'version' in data and data['version'] == SCHEMA_COMMON['version']:
            return data, False
        try:
            data.fixup_old_version = data['version']
        except KeyError:
            data.fixup_old_version = _OLDEST_VERSION
        data.version = SCHEMA_COMMON['version']
        if 'simulationType' not in data:
            if 'sourceIntensityReport' in data['models']:
                data['simulationType'] = 'srw'
            elif 'fieldAnimation' in data['models']:
                data['simulationType'] = 'warppba'
            elif 'bunchSource' in data['models']:
                data['simulationType'] = 'elegant'
            else:
                pkdlog('simulationType: not found; data={}', data)
                raise AssertionError('must have simulationType')
        elif data['simulationType'] == 'warp':
            data['simulationType'] = 'warppba'
        elif data['simulationType'] == 'fete':
            data['simulationType'] = 'warpvnd'
        if 'simulationSerial' not in data['models']['simulation']:
            data['models']['simulation']['simulationSerial'] = 0
        sirepo.template.import_module(
            data['simulationType']).fixup_old_data(data)
        pkcollections.unchecked_del(data.models, 'simulationStatus')
        pkcollections.unchecked_del(data, 'fixup_old_version')
        return data, True
    except Exception:
        pkdlog('{}: error: {}', data, pkdexc())
        raise
Example 6
def fixup_old_data(data, force=False):
    """Upgrade data to latest schema and updates version.

    Args:
        data (dict): to be updated (destructively)
        force (bool): force validation

    Returns:
        dict: upgraded `data`
        bool: True if data changed
    """
    try:
        if not force and 'version' in data and data.version == SCHEMA_COMMON.version:
            return data, False
        try:
            data.fixup_old_version = data.version
        except AttributeError:
            data.fixup_old_version = _OLDEST_VERSION
        data.version = SCHEMA_COMMON.version
        if 'simulationType' not in data:
            if 'sourceIntensityReport' in data.models:
                data.simulationType = 'srw'
            elif 'fieldAnimation' in data.models:
                data.simulationType = 'warppba'
            elif 'bunchSource' in data.models:
                data.simulationType = 'elegant'
            else:
                pkdlog('simulationType: not found; data={}', data)
                raise AssertionError('must have simulationType')
        elif data.simulationType == 'warp':
            data.simulationType = 'warppba'
        elif data.simulationType == 'fete':
            data.simulationType = 'warpvnd'
        if 'simulationSerial' not in data.models.simulation:
            data.models.simulation.simulationSerial = 0
        sirepo.template.import_module(data.simulationType).fixup_old_data(data)
        pkcollections.unchecked_del(data.models, 'simulationStatus')
        pkcollections.unchecked_del(data, 'fixup_old_version')
        return data, True
    except Exception:
        pkdlog('{}: error: {}', data, pkdexc())
        raise
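
Examples 5 and 6 are two revisions of the same function: the newer one relies on PKDict attribute access (data.version) where the older one used subscripting (data['version']), which is why the missing-key exception changes from KeyError to AttributeError. A small sketch of that PKDict behavior, assuming pykern is installed:

from pykern.pkcollections import PKDict

d = PKDict(version='x')
assert d.version == d['version']  # attribute and subscript access are equivalent
try:
    d.missing
except AttributeError:  # a missing attribute raises AttributeError, not KeyError
    pass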
Example 7
def save_simulation_json(data, do_validate=True):
    """Prepare data and save to json db

    Args:
        data (dict): what to write (contains simulationId)
        do_validate (bool): whether to validate the name and fields before saving
    """
    data = fixup_old_data(data)[0]
    # old implementation value
    data.pkdel('computeJobHash')
    s = data.models.simulation
    sim_type = data.simulationType
    fn = sim_data_file(sim_type, s.simulationId)
    with _global_lock:
        need_validate = True
        try:
            # OPTIMIZATION: If folder/name same, avoid reading entire folder
            on_disk = read_json(fn).models.simulation
            need_validate = not (
                on_disk.folder == s.folder and on_disk.name == s.name
            )
        except Exception:
            pass
        if need_validate and do_validate:
            srschema.validate_name(
                data,
                iterate_simulation_datafiles(
                    sim_type,
                    lambda res, _, d: res.append(d),
                    PKDict({'simulation.folder': s.folder}),
                ),
                SCHEMA_COMMON.common.constants.maxSimCopies,
            )
            srschema.validate_fields(data, get_schema(data.simulationType))
        s.simulationSerial = _serial_new()
        # Do not write simulationStatus or computeJobCacheKey
        d = copy.deepcopy(data)
        pkcollections.unchecked_del(d.models, 'simulationStatus', 'computeJobCacheKey')
        write_json(fn, d)
    return data
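
The deepcopy-then-unchecked_del step at the end keeps runtime-only keys (simulationStatus, computeJobCacheKey) out of the persisted JSON without mutating the caller's data. A minimal sketch of the same strip-before-write pattern, with the helper name invented here:

import copy

def _strip_volatile(d, volatile=('simulationStatus', 'computeJobCacheKey')):
    out = copy.deepcopy(d)  # leave the caller's dict intact
    for k in volatile:
        out.pop(k, None)    # pop with a default mirrors unchecked_del semantics
    return out              # safe to serialize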
Example 8
    def fixup_old_data(cls, data):
        def _fixup_reflector(m):
            if 'isReflector' not in m:
                return
            if m.isReflector == '1':
                for f in ('specProb', 'diffProb'):
                    m[f] = float(m[f])
                if m.specProb > 0:
                    m.reflectorType = 'specular'
                    m.reflectorProbability = m.specProb
                elif m.diffProb > 0:
                    m.reflectorType = 'diffuse'
                    m.reflectorProbability = m.diffProb
            for f in ('isReflector', 'specProb', 'diffProb', 'refScheme'):
                del m[f]

        dm = data.models
        dm.pksetdefault(optimizer=PKDict)
        dm.optimizer.pksetdefault(
            constraints=list,
            enabledFields=PKDict,
            fields=list,
        )
        cls._init_models(
            dm,
            (
                # simulationGrid must be first
                'simulationGrid',
                'anode',
                'egunCurrentAnimation',
                'fieldAnimation',
                'fieldCalcAnimation',
                'fieldCalculationAnimation',
                'fieldComparisonAnimation',
                'fieldComparisonReport',
                'fieldReport',
                'impactDensityAnimation',
                'optimizer',
                'optimizerAnimation',
                'optimizerStatus',
                'particle3d',
                'particleAnimation',
                'simulation',
                'cathode',
            ),
            dynamic=lambda m: cls.__dynamic_defaults(data, m),
        )
        pkcollections.unchecked_del(dm.particle3d, 'joinEvery')
        for m in ('anode', 'cathode'):
            _fixup_reflector(dm[m])
        s = cls.schema()
        for c in dm.conductorTypes:
            x = c.setdefault('isConductor', '1' if c.voltage > 0 else '0')
            # conductor.color is null in examples
            if not c.get('color', 0):
                c.color = s.constants['zeroVoltsColor' if x == '0' else 'nonZeroVoltsColor']
            cls.update_model_defaults(c, c.type)
            _fixup_reflector(c)
        for c in dm.conductors:
            cls.update_model_defaults(c, 'conductorPosition')
        if dm.optimizer.objective == 'efficiency':
            dm.optimizer.objective = 'transparency'
        cls._organize_example(data)
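
For reference, a worked trace of _fixup_reflector on a hypothetical legacy model (field values invented for illustration):

# Legacy model: m = PKDict(isReflector='1', specProb='0.8', diffProb='0', refScheme='r')
# specProb and diffProb are coerced to float; specProb 0.8 > 0, so:
#   m.reflectorType = 'specular'; m.reflectorProbability = 0.8
# All four legacy fields are then deleted, leaving only the new ones:
#   {'reflectorType': 'specular', 'reflectorProbability': 0.8}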