Example #1
def test_timeout(auth_fc):
    fc = auth_fc

    from pykern import pkconfig, pkunit, pkio
    from pykern import pkjson
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkok, pkre, pkeq, pkexcept
    import re

    r = fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type}, redirect=False)
    pkeq(200, r.status_code)
    d = pkjson.load_any(r.data)
    pkeq(True, d.authState.isLoggedIn)
    fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
    fc.sr_auth_state(
        isGuestUser=True,
        isLoggedIn=True,
        isLoginExpired=False,
    )
    fc.sr_get_json('adjustTime', params={'days': '2'})
    fc.sr_auth_state(
        isGuestUser=True,
        isLoggedIn=True,
        isLoginExpired=True,
    )
    with pkexcept('SRException.*guest-expired'):
        fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
Example #2
def test_uniquify_beamline():
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from pykern import pkjson
    from sirepo.template import madx

    d = pkjson.load_any(pkunit.data_dir().join('in.json'))
    madx.uniquify_elements(d)
    pkeq(1, len(d.models.beamlines), 'expecting one beamline={}',
         d.models.beamlines)
    l = d.models.beamlines[0]['items']
    pkeq(len(list(set(l))), len(l), 'expecting all unique items={}', l)
    m = {e._id: e.original_id for e in d.models.elements}
    r = [m[i] for i in d.models.beamlines[0]['items']]
    pkeq(
        [
            2,
            2,
            5,
            5,
            5,
            2,
            2,
            5,
            5,
            5,
            2,
        ], r,
        'expecting proper reflection of sub-lines. ids of original elements: {}',
        r)
Example #3
def _request(**kwargs):
    def get_api_name():
        if 'api_name' in kwargs:
            return kwargs['api_name']
        f = inspect.currentframe()
        for _ in range(_MAX_FRAME_SEARCH_DEPTH):
            m = re.search(r'^api_.*$', f.f_code.co_name)
            if m:
                return m.group()
            f = f.f_back
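        # there is no break in this loop, so the else clause runs only when
        # the frame walk exhausts _MAX_FRAME_SEARCH_DEPTH without finding api_*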
        else:
            raise AssertionError(
                '{}: max frame search depth reached'.format(f.f_code)
            )
    k = PKDict(kwargs)
    u = k.pkdel('_request_uri') or cfg.supervisor_uri + sirepo.job.SERVER_URI
    c = k.pkdel('_request_content') if '_request_content' in k else _request_content(k)
    c.pkupdate(
        api=get_api_name(),
        serverSecret=sirepo.job.cfg.server_secret,
    )
    pkdlog('api={} runDir={}', c.api, c.get('runDir'))
    r = requests.post(
        u,
        data=pkjson.dump_bytes(c),
        headers=PKDict({'Content-type': 'application/json'}),
        verify=sirepo.job.cfg.verify_tls,
    )
    r.raise_for_status()
    return pkjson.load_any(r.content)
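The frame walk in get_api_name() tags each request with the name of whichever api_* function called _request(). A minimal, hypothetical caller illustrating that resolution (api_runStatus and its keyword argument are invented for this sketch, not taken from the source above):

def api_runStatus(run_dir):
    # This caller's code object is named 'api_runStatus', so the regex
    # r'^api_.*$' in get_api_name() matches it one frame up and the request
    # content is posted with api='api_runStatus'.
    return _request(runDir=str(run_dir))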
Example #4
 def parse_response(self, resp, expect_binary_body=False):
     assert resp.code == 200, 'resp={}'.format(resp)
     if 'Set-Cookie' in resp.headers:
         self._headers.Cookie = resp.headers['Set-Cookie']
     if 'json' in resp.headers['content-type']:
         return pkjson.load_any(resp.body)
     try:
         b = pkcompat.from_bytes(resp.body)
         assert not expect_binary_body, \
             'expecting binary body resp={} body={}'.format(
                 resp,
                 b[:1000],
             )
     except UnicodeDecodeError:
         assert expect_binary_body, \
             'unexpected binary body resp={}'.format(resp)
         # Binary data files can't be decoded
         return
     if 'html' in resp.headers['content-type']:
         m = re.search('location = "(/[^"]+)', b)
         if m:
             if 'error' in m.group(1):
                 return PKDict(state='error', error='server error')
             return PKDict(state='redirect', uri=m.group(1))
     return b
Example #5
def host_init(j2_ctx, host):
    from rsconf import db

    jf = db.secret_path(j2_ctx, _PASSWD_SECRET_JSON_F, visibility=db.VISIBILITY_GLOBAL)
    if jf.check():
        with jf.open() as f:
            y = pkjson.load_any(f)
    else:
        y = pkcollections.Dict()
    if host not in y:
        y[host] = _passwd_entry(j2_ctx, host)
        pkjson.dump_pretty(y, filename=jf)
    return """install -m 600 /dev/stdin /root/.netrc <<'EOF'
machine {} login {} password {}
EOF
curl {} | install_server={} bash -s {}
# On {}: ssh {} true""".format(
        _vhost(j2_ctx),
        host,
        y[host],
        j2_ctx.rsconf_db.http_host,
        j2_ctx.rsconf_db.http_host,
        host,
        j2_ctx.bkp.primary,
        host,
    )
Example #6
def _rpc(request):
    """Send an RPC message to the runner daemon, and get the response.

    Args:
        request: the request, as a json-encodeable object

    Returns:
        response: the server response
    """
    request_bytes = pkjson.dump_bytes(request)
    with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
        sock.connect(str(srdb.runner_socket_path()))
        # send the request
        sock.sendall(request_bytes)
        # send EOF, so the other side knows we've sent the whole thing
        sock.shutdown(socket.SHUT_WR)
        # read the response
        response_bytes = bytearray()
        while True:
            chunk = sock.recv(_CHUNK_SIZE)
            if not chunk:
                break
            response_bytes += chunk
    if response_bytes == b'':
        raise AssertionError('runner daemon had an unknown error')
    return pkjson.load_any(bytes(response_bytes))
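The protocol in _rpc is framed by EOF in each direction: write one JSON blob, shut down the write side, then read one JSON blob back. A minimal sketch of a matching receive loop on the daemon side, assuming an already-bound, listening AF_UNIX socket and an invented handle_request callable:

import contextlib

from pykern import pkjson


def serve_one(listener, handle_request):
    # Accept one connection, read the JSON request until the client's
    # shutdown(SHUT_WR) produces EOF, then write back one JSON response.
    conn, _ = listener.accept()
    with contextlib.closing(conn):
        buf = bytearray()
        while True:
            chunk = conn.recv(4096)
            if not chunk:
                break
            buf += chunk
        reply = handle_request(pkjson.load_any(bytes(buf)))
        conn.sendall(pkjson.dump_bytes(reply))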
Example #7
 def _pop_users_flattened(self):
     jc = self.j2_ctx
     z = jc.dovecot
     pw_f = db.secret_path(
         jc,
         _PASSWORD_SECRET_JSON_F,
         visibility=_PASSWORD_VISIBILITY,
     )
     pw_modified = False
     if pw_f.check():
         with pw_f.open() as f:
             pw_db = pkjson.load_any(f)
     else:
         pw_db = pkcollections.Dict()
     res = []
     for u, v in z.pop_users.items():
         if not isinstance(v, dict):
             v = pkcollections.Dict(password=v, username=u)
         if v.username not in pw_db:
             pw_modified = True
             pw_db[v.username] \
                 = '{' + z.passdb_scheme + '}' + _sha512_crypt(v.password)
         i = z.base_users.user_spec(u)
         i.pw_hash = pw_db[v.username]
         i.username = v.username
         i.home_d = db.user_home_path(jc, u)
         res.append(i)
         self._setup_procmail(i)
         self.install_access(mode='700', owner=i.uid, group=i.gid)
         self.install_directory(i.home_d.join(z.user_mail_d))
     if pw_modified:
         pkjson.dump_pretty(pw_db, filename=pw_f)
     return sorted(res, key=lambda x: x.username)
Example #8
def test_read_all():
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code(
            'code1',
            version='1.1',
            uri='http://x.com',
            source_d='/tmp',
            pyenv='py2',
        )
        v = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE)).version
        pkjson.dump_pretty(
            {'version': v, 'image': {'type': 'docker'}},
            filename=rsmanifest.CONTAINER_FILE,
        )
        m = rsmanifest.read_all()
        pkeq(v, m.version)
        pkeq('docker', m.image.type)
        pkeq('1.1', m.codes.py2.code1.version)
Example #9
def install_crt_and_login(compt, j2_ctx):
    from rsconf.pkcli import tls

    if not update_j2_ctx(j2_ctx):
        return
    jf = db.secret_path(j2_ctx,
                        _PASSWD_SECRET_JSON_F,
                        visibility=_PASSWD_VISIBILITY)
    with jf.open() as f:
        y = pkjson.load_any(jf)
    u = j2_ctx.rsconf_db.host
    p = y.get(u, None)
    if not p:
        return
    j2_ctx.docker.auths[j2_ctx.docker_registry.http_addr] = dict(
        auth=pkcompat.from_bytes(
            base64.b64encode(
                pkcompat.to_bytes(u + ':' + pkcompat.from_bytes(p))), ), )
    compt.install_access(mode='700', owner=j2_ctx.docker_registry.run_u)
    crt = component.tls_key_and_crt(j2_ctx, j2_ctx.docker_registry.host).crt
    if not tls.is_self_signed_crt(crt):
        return
    compt.install_directory(_CERTS_D)
    d = _CERTS_D.join(j2_ctx.docker_registry.http_addr)
    compt.install_directory(d)
    compt.install_access(mode='400', owner=j2_ctx.docker_registry.run_u)
    compt.install_abspath(crt, d.join('ca.crt'))
    # need in /etc/pki as well (now)
    # https://success.docker.com/article/i-get-x509-certificate-signed-by-unknown-authority-error-when-i-try-to-login-to-my-dtr-with-default-certificates
    compt.install_abspath(
        crt, _CA_TRUST_D.join(j2_ctx.docker_registry.host + '.crt'))
    compt.append_root_bash('update-ca-trust')
Example #10
File: ml.py Project: cchall/sirepo
def _classification_metrics_report(frame_args, filename):
    def _get_labels():
        l = []
        for k in d:
            if not isinstance(d[k], PKDict):
                continue
            for x in d[k]:
                if x not in l:
                    l.append(x)
        return l

    def _get_matrix():
        r = []
        for k in d:
            if not isinstance(d[k], PKDict):
                continue
            try:
                x = [e[k]]
            except KeyError:
                x = [k]
            x.extend(d[k].values())
            r.append(x)
        return r

    e = _get_classification_output_col_encoding(frame_args)
    d = pkjson.load_any(frame_args.run_dir.join(filename))
    return PKDict(
        labels=_get_labels(),
        matrix=_get_matrix(),
    )
Example #11
    def run_dir_status(self, run_dir):
        """Get the current status of whatever's happening in run_dir.

        Returns:
          Tuple of (jhash or None, status of that job)

        """
        disk_in_path = run_dir.join('in.json')
        disk_status_path = run_dir.join('status')
        if disk_in_path.exists() and disk_status_path.exists():
            # status should be recorded on disk XOR in memory
            assert run_dir not in self.report_jobs
            disk_in_text = pkio.read_text(disk_in_path)
            disk_jhash = pkjson.load_any(disk_in_text).reportParametersHash
            disk_status = pkio.read_text(disk_status_path)
            if disk_status == 'pending':
                # We never write this, so it must be stale, in which case
                # the job is no longer pending...
                pkdlog(
                    'found "pending" status, treating as "error" ({})',
                    disk_status_path,
                )
                disk_status = runner_client.JobStatus.ERROR
            return disk_jhash, runner_client.JobStatus(disk_status)
        elif run_dir in self.report_jobs:
            job_info = self.report_jobs[run_dir]
            return job_info.jhash, job_info.status
        else:
            return None, runner_client.JobStatus.MISSING
Example #12
def default_command(in_file):
    """Reads `in_file` passes to `msg.jobCmd`

    Must be called in run_dir

    Writes its output on stdout.

    Args:
        in_file (str): json parsed to msg
    Returns:
        str: json output of command, e.g. status msg
    """
    try:
        job.init()
        f = pkio.py_path(in_file)
        msg = pkjson.load_any(f)
        #TODO(e-carlin): find common place to serialize/deserialize paths
        msg.runDir = pkio.py_path(msg.runDir)
        f.remove()
        res = globals()['_do_' + msg.jobCmd](msg,
                                             sirepo.template.import_module(
                                                 msg.simulationType))
        if res is None:
            return
        r = PKDict(res).pksetdefault(state=job.COMPLETED)
    except Exception as e:
        r = PKDict(
            state=job.ERROR,
            error=e.sr_args.error
            if isinstance(e, sirepo.util.UserAlert) else str(e),
            stack=pkdexc(),
        )
    return pkjson.dump_pretty(r, pretty=False)
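The dispatch above is by name: globals()['_do_' + msg.jobCmd] must resolve to a module-level function taking (msg, template). A hypothetical handler showing that contract (the name _do_example and its return key are invented for illustration):

def _do_example(msg, template):
    # msg is the parsed in.json as a PKDict; template is the module returned
    # by sirepo.template.import_module(msg.simulationType). A returned dict
    # becomes the reply; state defaults to job.COMPLETED when omitted.
    return PKDict(exampleResult=str(msg.runDir))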
Example #13
 def _write_sasl(self, jc, z):
     if not z.have_sasl:
         return
     self.install_access(mode='400', owner=jc.rsconf_db.root_u)
     r = []
     for domain, u in z.sasl_users.items():
         for user, password in u.items():
             r.append(
                 pkcollections.Dict(
                     domain=domain,
                     user=user,
                     password=password,
                 ), )
     jf = _sasl_password_path(jc)
     if jf.check():
         with jf.open() as f:
             y = pkjson.load_any(f)
         for u, p in y.items():
             x = u.split('@')
             r.append(
                 pkcollections.Dict(
                     domain=x[1],
                     user=x[0],
                     password=p,
                 ), )
     z.sasl_users_flattened = sorted(r, key=lambda x: x.user + x.domain)
     self.install_resource(
         'postfix/smtpd-sasldb.conf',
         jc,
         '/etc/sasl2/smtpd-sasldb.conf',
     )
Example #14
def test_read_all():
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code(
            'code1',
            version='1.1',
            uri='http://x.com',
            source_d='/tmp',
            pyenv='py2',
        )
        v = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE)).version
        pkjson.dump_pretty(
            {
                'version': v,
                'image': {
                    'type': 'docker'
                }
            },
            filename=rsmanifest.CONTAINER_FILE,
        )
        m = rsmanifest.read_all()
        pkeq(v, m.version)
        pkeq('docker', m.image.type)
        pkeq('1.1', m.codes.py2.code1.version)
Example #15
    async def run_extract_job(self, run_dir, jhash, subcmd, arg):
        pkdc('{} {}: {} {}', run_dir, jhash, subcmd, arg)
        status = self.report_job_status(run_dir, jhash)
        if status is runner_client.JobStatus.MISSING:
            pkdlog('{} {}: report is missing; skipping extract job', run_dir,
                   jhash)
            return {}
        # figure out which backend and any backend-specific info
        runner_info_file = run_dir.join(_RUNNER_INFO_BASENAME)
        if runner_info_file.exists():
            runner_info = pkjson.load_any(runner_info_file)
        else:
            # Legacy run_dir
            runner_info = pkcollections.Dict(
                version=1,
                backend='local',
                backend_info={},
            )
        assert runner_info.version == 1

        # run the job
        cmd = ['sirepo', 'extract', subcmd, arg]
        result = await _BACKENDS[runner_info.backend].run_extract_job(
            run_dir,
            cmd,
            runner_info.backend_info,
        )

        if result.stderr:
            pkdlog(
                'got output on stderr ({} {}):\n{}',
                run_dir,
                jhash,
                result.stderr.decode('utf-8', errors='ignore'),
            )

        if result.returncode != 0:
            pkdlog(
                'failed with return code {} ({} {}), stdout:\n{}',
                result.returncode,
                run_dir,
                subcmd,
                result.stdout.decode('utf-8', errors='ignore'),
            )
            raise AssertionError

        return pkjson.load_any(result.stdout)
Example #16
 def _parse_par(fn):
     data_file = fn.basename.replace('-flash.par', '')
     return flash_parser.ParameterParser().parse(
         pkjson.load_any(
             pkio.read_text(
                 pkunit.data_dir().join(f'{data_file}-sirepo-data.json'))),
         pkio.read_text(fn),
     )
Example #17
def parse_par(sim_id, par_path):
    """Returns parsed flash.par values.
    """
    sim_path = _sim_path_from_id(sim_id)
    return flash_parser.ParameterParser().parse(
        pkjson.load_any(pkio.read_text(sim_path)),
        pkio.read_text(par_path),
    )
Example #18
def test_load_any():
    import json
    from pykern import pkjson
    from pykern.pkunit import pkeq

    j = json.dumps(['a', 'b'])
    j2 = pkjson.load_any(j)
    pkeq('a', j2[0])
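Across this listing, pkjson.load_any is handed several different input types: a JSON string (as here), bytes such as an HTTP response body, an open file object, and a py.path-style path object. A consolidating sketch of those call shapes (the path 'in.json' is a placeholder and assumed to exist):

from pykern import pkio, pkjson

parsed = pkjson.load_any('{"a": 1}')                 # JSON text
parsed = pkjson.load_any(b'{"a": 1}')                # bytes, e.g. requests' r.content
parsed = pkjson.load_any(pkio.py_path('in.json'))    # a py.path object read from disk
with pkio.py_path('in.json').open() as f:
    parsed = pkjson.load_any(f)                      # an open file object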
Example #19
 def __slots_from_dump(cls, pool_name):
     p = pkio.py_path(_POOLS_DUMP_FILE)
     if not p.exists():
         return PKDict()
     slots = PKDict()
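     # pkunchecked_nested_get is used so a missing '<pool_name>.slots' yields
     # None (hence the trailing 'or []') instead of raising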
     for s in pkjson.load_any(p).pkunchecked_nested_get(
             f'{pool_name}.slots', ) or []:
         slots[s.cname] = s
     return slots
Example #20
def test_srw_create_predefined():
    from pykern import pkunit
    from pykern import pkjson
    import sirepo.pkcli.srw

    d = pkunit.empty_work_dir()
    sirepo.pkcli.srw.create_predefined(d)
    j = pkjson.load_any(d.listdir()[0])
    pkunit.pkeq(22, len(j.beams))
Example #21
def _get_default_drift():
    """The function parses srw.js file to find the default values for drift propagation parameters, which can be
    sometimes missed in the exported .py files (when distance = 0), but should be presented in .json files.

    Returns:
        str: default drift propagation paramters
    """
    c = pkio.read_text(_JS_DIR.join('srw.js'))
    m = re.search(r'function defaultDriftPropagationParams.*?return\s*(\[[^\]]+\])', c, re.DOTALL)
    return pkjson.load_any(m.group(1))
Example #22
def read_all():
    """Merge all manifests

    Returns:
        dict: merged data
    """
    from pykern import pkio
    from pykern import pkjson

    fn = pkio.py_path(USER_FILE)
    # Both must exist or error
    u = pkjson.load_any(fn)
    c = pkjson.load_any(pkio.py_path(CONTAINER_FILE))
    assert u.version == c.version, \
        '(user.version) {} != {} (container.version)'.format(u.version, c.version)
    # There are "guaranteed" to be no collisions, but if there are
    # we override user.
    c.update(u)
    return c
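The merge direction matters here: because the code calls c.update(u), any key present in both manifests ends up with the user-manifest value. A small sketch of that precedence (the file contents are invented):

from pykern import pkjson

u = pkjson.load_any('{"version": "20200101.1", "codes": {"py2": {}}}')         # user manifest
c = pkjson.load_any('{"version": "20200101.1", "image": {"type": "docker"}}')  # container manifest
c.update(u)
# c now carries 'image' from the container file and 'codes' from the user
# file; for any overlapping key, the user value wins.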
Example #23
def test_dump_bytes():
    import json
    from pykern import pkjson
    from pykern.pkunit import pkeq

    v = ['a', 'b']
    expect = json.dumps(v).replace(' ', '').encode(pkjson.ENCODING)
    actual = pkjson.dump_bytes(v)
    pkeq(expect, actual)
    actual = pkjson.load_any(actual)
    pkeq(v, actual)
Example #24
    def sr_post(self, route_or_uri, data, headers=None):
        from pykern import pkjson
        import requests

        r = requests.post(
            (f'http://{self._ip}:{self._nginx_proxy_port}' +
             f'{self._server_route(route_or_uri)}'),
            json=data,
            headers=headers,
        )
        r.raise_for_status()
        return pkjson.load_any(r.text)
Example #25
def test_add_code():
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code('A', 'b', 'c', 'd', pyenv='v')
        j = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE).read())
        pkok(20170101.0 < float(j.version), 'version must be after 2017')
        pkeq('A', j.codes.v.a.name)
        pkeq('b', j.codes.v.a.version)
        rsmanifest.add_code('a', 'bb', 'cc', 'dd')
        j = pkjson.load_any(pkio.expand_user_path(rsmanifest.USER_FILE).read())
        pkeq('A', j.codes.v.a.name)
        pkeq('a', j.codes[''].a.name)
        pkeq('bb', j.codes[''].a.version)
        pkre('20.*T.*Z', j.codes[''].a.installed)
Example #26
def test_parse_madx_file():
    from pykern import pkio, pkjson, pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser

    for name in ('particle_track', ):
        actual = madx_parser.parse_file(
            pkio.read_text(pkunit.data_dir().join(f'{name}.madx')))
        madx._fixup_madx(actual)
        del actual['version']
        expect = pkjson.load_any(pkunit.data_dir().join(f'{name}.json'))
        pkeq(expect, actual)
Example #27
def test_add_code():
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code('A', 'b', 'c', 'd', pyenv='v')
        j = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE).read())
        pkok(20170101.0 < float(j.version), 'version must be after 2017')
        pkeq('A', j.codes.v.a.name)
        pkeq('b', j.codes.v.a.version)
        rsmanifest.add_code('a', 'bb', 'cc', 'dd')
        j = pkjson.load_any(pkio.expand_user_path(rsmanifest.USER_FILE).read())
        pkeq('A', j.codes.v.a.name)
        pkeq('a', j.codes[''].a.name)
        pkeq('bb', j.codes[''].a.version)
        pkre('20.*T.*Z', j.codes[''].a.installed)
Example #28
def background_percent_complete(report, run_dir, is_running):
    def _grid_columns():
        c = _grid_evolution_columns(run_dir)
        return [x for x in c if x[0] != '#'] if c \
            else None

    def _plot_filenames():
        return [
            PKDict(
                time=_time_and_units(yt.load(str(f)).parameters['time']),
                filename=f.basename,
            )
            for f in files
        ]

    def _plot_vars():
        names = []
        if len(files):
            io = simulation_db.read_json(
                run_dir.join(template_common.INPUT_BASE_NAME),
            ).models.IO_IOMain
            idx = 1
            while io.get(f'plot_var_{idx}', ''):
                n = io[f'plot_var_{idx}']
                if n != 'none':
                    names.append(n)
                idx += 1
        return names

    res = PKDict(
        percentComplete=0 if is_running else 100,
    )
    if report == 'setupAnimation':
        f = run_dir.join(_SIM_DATA.SETUP_PARAMS_SCHEMA_FILE)
        if f.exists():
            res.pkupdate(
                frameCount=1,
                flashSchema=pkjson.load_any(pkio.read_text(f))
            )
    else:
        _init_yt()
        files = _h5_file_list(run_dir)
        if is_running and len(files):
            # the last file may be unfinished if the simulation is running
            files.pop()
        res.pkupdate(
            frameCount=len(files),
            plotVars=_plot_vars(),
            plotFiles=_plot_filenames(),
            gridEvolutionColumns=_grid_columns(),
        )
    return res
Example #29
def update_sim_from_par(sim_id, par_path):
    sim_path = _sim_path_from_id(sim_id)
    data = pkjson.load_any(pkio.read_text(sim_path))
    parser = flash_parser.ParameterParser()
    values = parser.parse(data, pkio.read_text(par_path))
    # reset all model fields to default and override with new par values
    for (m, fields) in parser.schema.model.items():
        for f in fields:
            if f in values:
                data.models[m][f] = values[f]
            else:
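                # fields[f][2] is assumed to hold the schema default for this field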
                data.models[m][f] = fields[f][2]
    pkjson.dump_pretty(data, sim_path)
Example #30
 async def on_stdout_read(self, text):
     if self._terminating or not self.msg.opId:
         return
     try:
         await self.dispatcher.send(
             self.dispatcher.format_op(
                 self.msg,
                 job.OP_RUN if self._is_compute else job.OP_ANALYSIS,
                 reply=pkjson.load_any(text),
             )
         )
     except Exception as exc:
         pkdlog('text={} error={} stack={}', text, exc, pkdexc())
Example #31
def test_parse_madx_file():
    from pykern import pkio, pkjson, pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser

    with pkunit.save_chdir_work():
        for name in ('particle_track', 'alba'):
            actual = madx_parser.parse_file(pkio.read_text(
                pkunit.data_dir().join(f'{name}.madx')))
            del actual['version']
            outfile = f'{name}.json'
            pkjson.dump_pretty(actual, outfile)
            expect = pkjson.load_any(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
Example #32
def host_init(j2_ctx, host):
    from rsconf import db

    jf = _sasl_password_path(j2_ctx)
    if jf.check():
        with jf.open() as f:
            y = pkjson.load_any(f)
    else:
        y = pkcollections.Dict()
    u = _SASL_PASSWORD_PREFIX + host
    if u not in y:
        y[u] = db.random_string()
        pkjson.dump_pretty(y, filename=jf)
    return u, y[u]
Example #33
def _cfg_json(value):
    from pykern import pkjson
    if isinstance(value, pkcollections.Dict):
        return value
    return pkjson.load_any(value)