Beispiel #1
0
def test_walk_tree():
    """Exercise pkio.walk_tree in a scratch work directory.

    NOTE(review): the original docstring read "Creates looks in data_dir",
    which appears garbled; the test actually builds its tree inside the
    ``pkunit.save_chdir_work()`` directory.
    """
    with pkunit.save_chdir_work() as pwd:
        # Directories alone must not appear in walk_tree output.
        for f in ('d1/d7', 'd2/d3', 'd4/d5/d6'):
            pkio.mkdir_parent(f)
        expect = []
        for f in ['d1/d7/f1', 'd4/d5/f2', 'd2/d3/f3']:
            pkio.write_text(f, '')
            expect.append(py.path.local(f))
        assert sorted(expect) == list(pkio.walk_tree('.')), \
            'When walking tree, should only return files'
        assert [expect[2]] == list(pkio.walk_tree('.', 'f3')), \
            'When walking tree with file_re, should only return matching files'
        assert [expect[0]] == list(pkio.walk_tree('.', '^d1')), \
            'When walking tree with file_re, file to match does not include dir being searched'
Beispiel #2
0
def test_purge_users(monkeypatch):
    """End-to-end check of ``admin.purge_users`` against a single user dir.

    Backdates every file in the one user directory by two days, then
    verifies: no deletes while the uid is registered, a dry-run listing when
    it is not, and actual removal only when ``confirm=True``.
    """
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from pykern import pkconfig
    from sirepo import srunit
    srunit.init_auth_db(sim_types='myapp')

    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import auth_db
    import datetime

    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: no old users so empty')
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    #  a test oauth class.
    monkeypatch.setattr(auth_db, 'all_uids', lambda: [uid])
    # Make every file look older than the one-day cutoff (2 days back).
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    monkeypatch.setattr(auth_db, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    # Dry run (confirm=False) must not actually delete anything.
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
Beispiel #3
0
def _dicom_files(dirname):
    """Scan ``dirname`` for ``.dcm`` files and group them by SOP class.

    Returns:
        PKDict: ``ctmap`` (InstanceNumber -> path), first ``rtdose`` path,
        extra dose files in ``additional_rtdose``, ``rtstruct`` path, and the
        last CT ``position`` seen. Asserts both RTDOSE and RTSTRUCT exist.
    """
    res = PKDict(
        ctmap=PKDict(),
        rtdose=None,
        rtstruct=None,
        position=None,
        additional_rtdose=[],
    )
    for p in pkio.walk_tree(dirname):
        name = str(p)
        if not pkio.has_file_extension(name, 'dcm'):
            continue
        # Read only the tags needed for classification.
        ds = pydicom.dcmread(
            name,
            specific_tags=[
                'SOPClassUID',
                'InstanceNumber',
                'PatientPosition',
            ],
        )
        cls = ds.SOPClassUID
        if cls == _DICOM_CLASS.CT_IMAGE:
            res.ctmap[int(ds.InstanceNumber)] = name
            res.position = ds.PatientPosition
        elif cls == _DICOM_CLASS.RT_DOSE:
            # First dose file wins; the rest are kept separately.
            if res.rtdose is None:
                res.rtdose = name
            else:
                res.additional_rtdose.append(name)
        elif cls == _DICOM_CLASS.RT_STRUCT:
            res.rtstruct = name
    assert res.rtdose and res.rtstruct, 'Missing RTSTRUCT and/or RTDOSE'
    return res
Beispiel #4
0
def purge_users(days=180, confirm=False):
    """Remove old users from db which have not registered.

    A user directory is "old" when every file in it has an mtime more than
    ``days`` days in the past.

    Args:
        days (int): maximum days of untouched files (old is mtime > days)
        confirm (bool): delete the directories if True (else don't delete) [False]

    Returns:
        list: directories removed (or to remove if confirm)
    """
    days = int(days)
    # Bug fix: the message previously left the '{}' placeholder unfilled.
    assert days >= 1, \
        '{}: days must be a positive integer'.format(days)
    server.init()

    uids = auth_db.all_uids()
    now = datetime.datetime.utcnow()
    to_remove = []
    for d in pkio.sorted_glob(simulation_db.user_dir_name('*')):
        # Never purge the special "src" directory.
        if _is_src_dir(d):
            continue
        # Registered users are always kept.
        if simulation_db.uid_from_dir_name(d) in uids:
            continue
        for f in pkio.walk_tree(d):
            # Any file newer than the cutoff keeps the whole directory.
            if (now - now.fromtimestamp(f.mtime())).days <= days:
                break
        else:
            to_remove.append(d)
    if confirm:
        pkio.unchecked_remove(*to_remove)
    return to_remove
Beispiel #5
0
def test_purge_users(monkeypatch):
    """End-to-end check of ``admin.purge_users`` (api_auth variant).

    Same flow as the auth_db-based test: backdate the lone user dir, then
    confirm dry-run vs. confirmed deletion behavior while toggling the set
    of registered uids via monkeypatch.
    """
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from pykern import pkconfig
    from sirepo import srunit
    srunit.init_user_db()

    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import api_auth
    import datetime

    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: no old users so empty')
    pkdp(simulation_db.user_dir_name('*'))
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    #  a test oauth class.
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [uid])
    # Make every file look older than the one-day cutoff (2 days back).
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    # Dry run (confirm=False) must not actually delete anything.
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
Beispiel #6
0
 def _check_file(exists=True):
     """Assert the proprietary file is present (or absent) in the user dir.

     ``fc`` and ``_proprietary_file`` come from the enclosing test scope.
     """
     found = [
         p.basename
         for p in pkio.walk_tree(fc.sr_user_dir(), _proprietary_file)
     ]
     expect = [_proprietary_file] if exists else []
     pkunit.pkeq(expect, found)
Beispiel #7
0
def examples(app):
    """Return the parsed example simulation files for ``app``.

    Walks the app's example resource directory for files ending in
    ``JSON_SUFFIX`` and opens each one.
    """
    #TODO(robnagler) Need to update examples statically before build
    # and assert on build
    pattern = re.escape(JSON_SUFFIX) + '$'
    example_dir = pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app))
    return [
        open_json_file(app, path=str(p))
        for p in pkio.walk_tree(example_dir, pattern)
    ]
Beispiel #8
0
def examples(app):
    """Return the parsed example simulation files for ``app``.

    Args:
        app (str): simulation type whose example dir is scanned

    Returns:
        list: result of ``open_json_file`` for each example JSON file
    """
    files = pkio.walk_tree(
        pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app)),
        re.escape(JSON_SUFFIX) + '$',
    )
    #TODO(robnagler) Need to update examples statically before build
    # and assert on build
    return [open_json_file(app, path=str(f)) for f in files]
Beispiel #9
0
def _init_local_files(values):
    """Collect local override files for the default level, channel, and host.

    Maps each file's '/'-rooted path (relative to its level directory) to the
    file itself; later levels overwrite earlier ones for the same path.
    """
    cfg = values.rsconf_db
    root = cfg.db_d.join(LOCAL_SUBDIR)
    res = PKDict()
    for level in (LEVELS[0], cfg.channel, cfg.host):
        level_d = root.join(level)
        for path in pkio.walk_tree(level_d):
            res['/' + level_d.bestrelpath(path)] = path
    return res
Beispiel #10
0
def examples(app):
    """Return the example simulations for ``app`` without fixups.

    Example data is not fixed-up to avoid performance problems when
    searching examples by name; fixup occurs during save_new_example().
    """
    #TODO(robnagler) Need to update examples statically before build
    # and assert on build
    pattern = re.escape(JSON_SUFFIX) + '$'
    example_dir = sirepo.sim_data.get_class(app).resource_path(_EXAMPLE_DIR)
    return [
        open_json_file(app, path=str(p), fixup=False)
        for p in pkio.walk_tree(example_dir, pattern)
    ]
Beispiel #11
0
def examples(app):
    """Return the example simulations for ``app`` without fixups.

    Args:
        app (str): simulation type whose example dir is scanned

    Returns:
        list: result of ``open_json_file(..., fixup=False)`` per example
    """
    files = pkio.walk_tree(
        template_common.resource_dir(app).join(_EXAMPLE_DIR),
        re.escape(JSON_SUFFIX) + '$',
    )
    #TODO(robnagler) Need to update examples statically before build
    # and assert on build
    # example data is not fixed-up to avoid performance problems when searching examples by name
    # fixup occurs during save_new_example()
    return [open_json_file(app, path=str(f), fixup=False) for f in files]
Beispiel #12
0
def test_walk_tree_and_sorted_glob():
    """Exercise pkio.walk_tree and pkio.sorted_glob in a scratch work dir."""
    from pykern import pkunit
    from pykern import pkio

    with pkunit.save_chdir_work() as pwd:
        # Directories alone must not appear in walk_tree output.
        for f in ('d1/d7', 'd2/d3', 'd4/d5/d6'):
            pkio.mkdir_parent(f)
        expect = []
        for f in ['d1/d7/f1', 'd4/d5/f2', 'd2/d3/f3']:
            pkio.write_text(f, '')
            expect.append(py.path.local(f))
        assert sorted(expect) == list(pkio.walk_tree('.')), \
            'When walking tree, should only return files'
        assert [expect[2]] == list(pkio.walk_tree('.', 'f3')), \
            'When walking tree with file_re, should only return matching files'
        assert [expect[0]] == list(pkio.walk_tree('.', '^d1')), \
            'When walking tree with file_re, file to match does not include dir being searched'
        assert pkio.sorted_glob('*[42]') == [py.path.local(f) for f in ('d2', 'd4')]
Beispiel #13
0
def _extract_series_frames(simulation, dicom_dir):
    """Walk ``dicom_dir`` and collect CT frames for a single series.

    Selects the first CT series whose orientation matches
    ``_EXPECTED_ORIENTATION``, summarizes any RTDOSE file found, and (if
    present) parses the RTSTRUCT for regions of interest.

    Args:
        simulation: simulation model passed through to the summarizers
        dicom_dir: directory tree scanned for ``.dcm`` files

    Returns:
        dict: description, StudyInstanceUID, PixelSpacing, frames sorted by
        z position, and optionally dicom_dose / regionsOfInterest

    Raises:
        RuntimeError: duplicate frame z coordinate, or no matching series
    """
    #TODO(pjm): give user a choice between multiple study/series if present
    selected_series = None
    frames = {}
    # NOTE(review): dicom_dose is never used below; dose info goes into
    # res['dicom_dose'] instead.
    dicom_dose = None
    rt_struct_path = None
    res = {
        'description': '',
    }
    for path in pkio.walk_tree(dicom_dir):
        if pkio.has_file_extension(str(path), 'dcm'):
            plan = dicom.read_file(str(path))
            if plan.SOPClassUID == _DICOM_CLASS['RT_STRUCT']:
                rt_struct_path = str(path)
            elif plan.SOPClassUID == _DICOM_CLASS['RT_DOSE']:
                res['dicom_dose'] = _summarize_rt_dose(simulation, plan)
                plan.save_as(_dose_dicom_filename(simulation))
            # Only CT images contribute frames; other classes were handled above.
            if plan.SOPClassUID != _DICOM_CLASS['CT_IMAGE']:
                continue
            orientation = _float_list(plan.ImageOrientationPatient)
            # Skip slices not in the expected patient orientation.
            if not (_EXPECTED_ORIENTATION == orientation).all():
                continue
            if not selected_series:
                # First matching CT slice fixes the series used for all frames.
                selected_series = plan.SeriesInstanceUID
                res['StudyInstanceUID'] = plan.StudyInstanceUID
                res['PixelSpacing'] = plan.PixelSpacing
                if hasattr(plan, 'SeriesDescription'):
                    res['description'] = plan.SeriesDescription
            if selected_series != plan.SeriesInstanceUID:
                continue
            info = {
                'pixels': np.float32(plan.pixel_array),
                'shape': plan.pixel_array.shape,
                'ImagePositionPatient': _string_list(plan.ImagePositionPatient),
                'ImageOrientationPatient': _float_list(plan.ImageOrientationPatient),
                'PixelSpacing': _float_list(plan.PixelSpacing),
            }
            for f in ('FrameOfReferenceUID', 'StudyInstanceUID', 'SeriesInstanceUID', 'SOPInstanceUID'):
                info[f] = getattr(plan, f)
            # Frames are keyed by z coordinate (third position component).
            z = _frame_id(info['ImagePositionPatient'][2])
            info['frameId'] = z
            if z in frames:
                raise RuntimeError('duplicate frame with z coord: {}'.format(z))
            _scale_pixel_data(plan, info['pixels'])
            frames[z] = info
    if not selected_series:
        raise RuntimeError('No series found with {} orientation'.format(_EXPECTED_ORIENTATION))
    if rt_struct_path:
        res['regionsOfInterest'] = _summarize_rt_structure(simulation, dicom.read_file(rt_struct_path), frames.keys())
    # Emit frames in ascending z order.
    sorted_frames = []
    res['frames'] = sorted_frames
    for z in sorted(_float_list(frames.keys())):
        sorted_frames.append(frames[_frame_id(z)])
    return res
Beispiel #14
0
def _extract_series_frames(simulation, dicom_dir):
    """Walk ``dicom_dir`` and collect CT frames for a single series.

    Selects the first CT series whose orientation matches
    ``_EXPECTED_ORIENTATION``, summarizes any RTDOSE file found, and (if
    present) parses the RTSTRUCT for regions of interest.

    Args:
        simulation: simulation model passed through to the summarizers
        dicom_dir: directory tree scanned for ``.dcm`` files

    Returns:
        dict: description, StudyInstanceUID, PixelSpacing, frames sorted by
        z position, and optionally dicom_dose / regionsOfInterest

    Raises:
        RuntimeError: duplicate frame z coordinate, or no matching series
    """
    #TODO(pjm): give user a choice between multiple study/series if present
    selected_series = None
    frames = {}
    # NOTE(review): dicom_dose is never used below; dose info goes into
    # res['dicom_dose'] instead.
    dicom_dose = None
    rt_struct_path = None
    res = {
        'description': '',
    }
    for path in pkio.walk_tree(dicom_dir):
        if pkio.has_file_extension(str(path), 'dcm'):
            plan = dicom.read_file(str(path))
            if plan.SOPClassUID == _DICOM_CLASS['RT_STRUCT']:
                rt_struct_path = str(path)
            elif plan.SOPClassUID == _DICOM_CLASS['RT_DOSE']:
                res['dicom_dose'] = _summarize_rt_dose(simulation, plan)
                plan.save_as(_dose_dicom_filename(simulation))
            # Only CT images contribute frames; other classes were handled above.
            if plan.SOPClassUID != _DICOM_CLASS['CT_IMAGE']:
                continue
            orientation = _float_list(plan.ImageOrientationPatient)
            # Skip slices not in the expected patient orientation.
            if not (_EXPECTED_ORIENTATION == orientation).all():
                continue
            if not selected_series:
                # First matching CT slice fixes the series used for all frames.
                selected_series = plan.SeriesInstanceUID
                res['StudyInstanceUID'] = plan.StudyInstanceUID
                res['PixelSpacing'] = plan.PixelSpacing
                if hasattr(plan, 'SeriesDescription'):
                    res['description'] = plan.SeriesDescription
            if selected_series != plan.SeriesInstanceUID:
                continue
            info = {
                'pixels': np.float32(plan.pixel_array),
                'shape': plan.pixel_array.shape,
                'ImagePositionPatient': _string_list(plan.ImagePositionPatient),
                'ImageOrientationPatient': _float_list(plan.ImageOrientationPatient),
                'PixelSpacing': _float_list(plan.PixelSpacing),
            }
            for f in ('FrameOfReferenceUID', 'StudyInstanceUID', 'SeriesInstanceUID', 'SOPInstanceUID'):
                info[f] = getattr(plan, f)
            # Frames are keyed by z coordinate (third position component).
            z = _frame_id(info['ImagePositionPatient'][2])
            info['frameId'] = z
            if z in frames:
                raise RuntimeError('duplicate frame with z coord: {}'.format(z))
            _scale_pixel_data(plan, info['pixels'])
            frames[z] = info
    if not selected_series:
        raise RuntimeError('No series found with {} orientation'.format(_EXPECTED_ORIENTATION))
    if rt_struct_path:
        res['regionsOfInterest'] = _summarize_rt_structure(simulation, dicom.read_file(rt_struct_path), frames.keys())
    # Emit frames in ascending z order.
    sorted_frames = []
    res['frames'] = sorted_frames
    for z in sorted(_float_list(frames.keys())):
        sorted_frames.append(frames[_frame_id(z)])
    return res
Beispiel #15
0
def _find(paths):
    """Collect test files under ``paths`` (default ``('tests',)``).

    Files given directly are returned as-is; directories are searched for
    ``*_test.py``, skipping anything under ``_work/`` or ``_data/``. All
    results are relative to the current directory.
    """
    from pykern import pkio
    import re

    skip = re.compile(r'(?:_work|_data)/')
    cwd = pkio.py_path()
    res = []
    for path in paths or ('tests', ):
        path = pkio.py_path(path)
        if path.check(file=True):
            res.append(str(cwd.bestrelpath(path)))
            continue
        for match in pkio.walk_tree(path, re.compile(r'_test\.py$')):
            rel = str(cwd.bestrelpath(match))
            if skip.search(rel):
                continue
            res.append(rel)
    return res
Beispiel #16
0
def init_tree(name, author, author_email, description, license, url):
    """Setup a project tree with: docs, tests, etc., and checkin to git.

    Creates: setup.py, index.rst, project dir, <name>_console.py, etc.
    Overwrites files if they exist without checking.

    Args:
        name (str): short name of the project, e.g. ``pykern``.
        author (str): copyright holder, e.g. ``RadiaSoft LLC``
        author_email (str): how to reach author, e.g. ``[email protected]``
        description (str): one-line summary of project
        license (str): url of license
        url (str): website for project, e.g. http://pykern.org
    """
    assert os.path.isdir('.git'), \
        'Must be run from the root directory of the repo'
    assert not os.path.isdir(name), \
        '{}: already exists, only works on fresh repos'.format(name)
    assert name == py.path.local().basename, \
        '{}: name must be the name of the current directory'.format(name)
    # Normalize so the license key matches the template file names below.
    license = license.lower()
    base = pkresource.filename('projex')
    values = copy.deepcopy(DEFAULTS)
    values.update({
        'name': name,
        'author': author,
        'description': description,
        'author_email': author_email,
        'url': url,
        'license': _license(license, 0),
        'classifier_license': _license(license, 1),
    })
    values['copyright_license_rst'] = values['copyright_license_rst'].format(**values)
    suffix_re = r'\.jinja$'
    # Render every .jinja template from the projex skeleton into the new tree,
    # substituting the project name and dot-prefixed file names.
    for src in pkio.walk_tree(base, file_re=suffix_re):
        dst = py.path.local(src).relto(str(base))
        dst = dst.replace('projex', name).replace('dot-', '.')
        dst = re.sub(suffix_re, '', dst)
        pkio.mkdir_parent_only(dst)
        _render(src, values, output=dst)
    # The LICENSE file comes from a separate per-license template.
    src = py.path.local(pkresource.filename('projex-licenses'))
    src = src.join(license + '.jinja')
    _render(src, values, output='LICENSE')
Beispiel #17
0
def purge_guest_users(days=180, confirm=False):
    """Remove old guest users from db which have not been used recently.

    A guest's directory is "old" when every file in it has an mtime more
    than ``days`` days in the past.

    Args:
        days (int): maximum days of untouched files (old is mtime > days)
        confirm (bool): delete the directories if True (else don't delete) [False]

    Returns:
        dict: directory -> uid of removed guest users (or to remove if confirm)
    """

    days = int(days)
    # Bug fix: the message previously left the '{}' placeholder unfilled.
    assert days >= 1, \
        '{}: days must be a positive integer'.format(days)
    server.init()
    from sirepo import srtime

    guest_uids = auth.guest_uids()
    now = srtime.utc_now()
    dirs_and_uids = {}

    for d in pkio.sorted_glob(simulation_db.user_dir_name().join('*')):
        uid = simulation_db.uid_from_dir_name(d)
        # Never purge the special "src" directory.
        if _is_src_dir(d):
            continue
        # Only guest accounts are eligible for purging.
        if uid not in guest_uids:
            continue
        for f in pkio.walk_tree(d):
            # Any file newer than the cutoff keeps the whole directory.
            if (now - now.fromtimestamp(f.mtime())).days <= days:
                break
        else:
            dirs_and_uids[d] = uid
    if confirm:
        pkio.unchecked_remove(*dirs_and_uids.keys())
        auth_db.UserRegistration.delete_all_for_column_by_values(
            'uid', dirs_and_uids.values())

    return dirs_and_uids
Beispiel #18
0
def test_purge_users(monkeypatch):
    """End-to-end check of ``admin.purge_users`` (server.all_uids variant).

    Backdates the lone user dir by two days, then verifies dry-run vs.
    confirmed deletion while toggling the set of registered uids.
    """
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from sirepo import sr_unit

    # Need to initialize first
    sr_unit.init_user_db()

    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import server
    import datetime

    #TODO(pjm): tried pkconfig.reset_state_for_testing() but couldn't override bool to False
    server.cfg.oauth_login = False

    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: no old users so empty')
    pkdp(simulation_db.user_dir_name('*'))
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    #  a test oauth class.
    monkeypatch.setattr(server, 'all_uids', lambda: [uid])
    # Make every file look older than the one-day cutoff (2 days back).
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    monkeypatch.setattr(server, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    # Dry run (confirm=False) must not actually delete anything.
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
Beispiel #19
0
def _search_and_parse(dirs):
    """Walk ``dirs`` for index.txt files and build a photo search index.

    Args:
        dirs (iterable): roots to search

    Returns:
        dict: parallel ``images``/``links``/``titles`` lists plus a
        ``words`` map of search word -> file indices
    """
    files = []
    for d in dirs:
        # Bug fix: escape the '.' so only a literal 'index.txt' matches,
        # not e.g. 'index_txt'.
        files.extend(list(pkio.walk_tree(d, file_re=r'index\.txt$')))
    # Newest directories first (walk_tree returns sorted order).
    files.reverse()
    res = {
        'images': [],
        'links': [],
        'titles': [],
        'words': {},
    }
    # enumerate replaces the zip(files, range(len(files))) idiom.
    for fi, f in enumerate(files):
        s = _DIR_RE.search(f.dirname)
        assert s, '{}: non-date dirname'.format(f)
        root = py.path.local(s.group(1))
        y, m, d = s.group(2, 3, 4)
        # Index by year, year+month, and year+month+day strings.
        _add_words(res['words'], fi, [y, y + m, y + m + d])
        w, i, t = _index_parse(str(f), fi)
        res['links'].append(root.bestrelpath(py.path.local(f.dirname)))
        res['images'].append(_thumb(i))
        res['titles'].append('{}/{}/{}{}'.format(m, d, y, t))
        _add_words(res['words'], fi, w)
    return res
Beispiel #20
0
def _search_and_parse(dirs):
    """Walk ``dirs`` for index.txt files and build a photo search index.

    Args:
        dirs (iterable): roots to search

    Returns:
        dict: parallel ``images``/``links``/``titles`` lists plus a
        ``words`` map of search word -> file indices
    """
    files = []
    for d in dirs:
        # Bug fix: escape the '.' so only a literal 'index.txt' matches,
        # not e.g. 'index_txt'.
        files.extend(list(pkio.walk_tree(d, file_re=r'(?:^|/)index\.txt$')))
    # Newest directories first (walk_tree returns sorted order).
    files.reverse()
    res = {
        'images': [],
        'links': [],
        'titles': [],
        'words': {},
    }
    # enumerate replaces the zip(files, range(len(files))) idiom.
    for fi, f in enumerate(files):
        s = _DIR_RE.search(f.dirname)
        assert s, '{}: non-date dirname'.format(f)
        root = py.path.local(s.group(1))
        y, m, d = s.group(2, 3, 4)
        # Index by year, year+month, and year+month+day strings.
        _add_words(res['words'], fi, [y, y + m, y + m + d])
        w, i, t = _index_parse(str(f), fi)
        res['links'].append(root.bestrelpath(py.path.local(f.dirname)))
        res['images'].append(_thumb(i))
        res['titles'].append('{}/{}/{}{}'.format(m, d, y, t))
        _add_words(res['words'], fi, w)
    return res
Beispiel #21
0
def purge_users(days=180, confirm=False):
    """Remove old users from db which have not registered.

    A user directory is "old" when every file in it has an mtime more than
    ``days`` days in the past.

    Args:
        days (int): maximum days of untouched files (old is mtime > days)
        confirm (bool): delete the directories if True (else don't delete) [False]

    Returns:
        list: directories removed (or to remove if confirm)
    """
    from pykern import pkio
    from sirepo import server
    from sirepo import simulation_db
    from sirepo import api_auth
    import datetime

    days = int(days)
    # Bug fix: the message previously left the '{}' placeholder unfilled.
    assert days >= 1, \
        '{}: days must be a positive integer'.format(days)
    server.init()
    uids = api_auth.all_uids()
    now = datetime.datetime.utcnow()
    to_remove = []
    for d in pkio.sorted_glob(simulation_db.user_dir_name('*')):
        #TODO(pjm): need to skip special "src" user
        if _is_src_dir(d):
            continue
        # Registered users are always kept.
        if simulation_db.uid_from_dir_name(d) in uids:
            continue
        for f in pkio.walk_tree(d):
            # Any file newer than the cutoff keeps the whole directory.
            if (now - now.fromtimestamp(f.mtime())).days <= days:
                break
        else:
            to_remove.append(d)
    if confirm:
        pkio.unchecked_remove(*to_remove)
    return to_remove
Beispiel #22
0
def _h5_file_list(run_dir, model_name):
    """Return the ``.h5`` output files for the given animation model."""
    if model_name == 'currentAnimation':
        sub_dir = 'diags/xzsolver/hdf5'
    else:
        sub_dir = 'diags/fields/electric'
    return pkio.walk_tree(run_dir.join(sub_dir), r'\.h5$')
Beispiel #23
0
def upgrade_runner_to_job_db(db_dir):
    """Convert legacy runner status files into supervisor job-db JSON files.

    Walks every run_dir under the user dir tree that contains a runner
    status file (skipping ``src/``) and writes a ``<computeJid>.json`` into
    ``db_dir`` for completed/canceled jobs.

    Args:
        db_dir (str or py.path): destination directory for job db files
    """
    import sirepo.auth
    from pykern import pkio
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp, pkdlog, pkdexc
    from sirepo import job
    from sirepo import simulation_db
    from sirepo import sim_data
    from sirepo import util
    import pykern.pkio
    import sirepo.template

    def _add_compute_status(run_dir, data):
        # Status file content becomes the job status; its mtime is the
        # last update time.
        p = run_dir.join(job.RUNNER_STATUS_FILE)
        data.pkupdate(
            lastUpdateTime=int(p.mtime()),
            status=pkio.read_text(p),
        )

    def _add_parallel_status(in_json, sim_data, run_dir, data):
        t = sirepo.template.import_module(data.simulationType)
        # pksetdefault emulates job_cmd._background_percent_complete
        data.parallelStatus = PKDict(
            t.background_percent_complete(
                sim_data.parse_model(in_json),
                run_dir,
                False,
            )).pksetdefault(
                lastUpdateTime=data.lastUpdateTime,
                frameCount=0,
                percentComplete=0.0,
            )

    def _create_supervisor_state_file(run_dir):
        try:
            i, t = _load_in_json(run_dir)
        except Exception as e:
            # A missing input json just means there's nothing to convert.
            if pykern.pkio.exception_is_not_found(e):
                return
            raise
        u = simulation_db.uid_from_dir_name(run_dir)
        sirepo.auth.cfg.logged_in_user = u
        c = sim_data.get_class(i.simulationType)
        d = PKDict(
            computeJid=c.parse_jid(i, u),
            computeJobHash=c.compute_job_hash(
                i),  # TODO(e-carlin): Another user cookie problem
            computeJobSerial=t,
            computeJobStart=t,
            error=None,
            history=[],
            isParallel=c.is_parallel(i),
            simulationId=i.simulationId,
            simulationType=i.simulationType,
            uid=u,
        )
        d.pkupdate(
            jobRunMode=job.PARALLEL if d.isParallel else job.SEQUENTIAL,
            nextRequestSeconds=c.poll_seconds(i),
        )
        _add_compute_status(run_dir, d)
        # Only finished jobs are migrated; running/pending ones are skipped.
        if d.status not in (job.COMPLETED, job.CANCELED):
            return

        if d.isParallel:
            _add_parallel_status(i, c, run_dir, d)
        util.json_dump(d, path=_db_file(d.computeJid))

    def _db_file(computeJid):
        return db_dir.join(computeJid + '.json')

    def _load_in_json(run_dir):
        p = simulation_db.json_filename(
            sirepo.template.template_common.INPUT_BASE_NAME, run_dir)
        c = simulation_db.read_json(p)
        # Bug fix: the guard previously checked 'computejobCacheKey'
        # (lower-case "j"), which never matched, so the cached start time
        # was always ignored in favor of the file mtime.
        return c, c.computeJobCacheKey.computeJobStart if \
            c.get('computeJobCacheKey') else \
            int(p.mtime())

    c = 0
    db_dir = pkio.py_path(db_dir)
    pkio.mkdir_parent(db_dir)
    for f in pkio.walk_tree(
            simulation_db.user_dir_name(),
            '^(?!.*src/).*/{}$'.format(sirepo.job.RUNNER_STATUS_FILE),
    ):
        try:
            _create_supervisor_state_file(pkio.py_path(f.dirname))
        except Exception as e:
            # Keep going on per-run_dir failures; log full stacks only for
            # the first 50 errors to keep the log readable.
            c += 1
            k = PKDict(run_dir=f)
            s = 'run_dir={run_dir}'
            if c < 50:
                k.stack = pkdexc()
                s += ' stack={stack}'
            else:
                k.error = getattr(e, 'args', []) or e
                s += ' error={error}'
            pkdlog(s, **k)
Beispiel #24
0
def default_command():
    """Find all dirs containing an index.txt and try to fix each one."""
    # Bug fix: escape the '.' so only a literal 'index.txt' matches,
    # not e.g. 'index_txt'.
    for f in pkio.walk_tree('.', file_re=r'index\.txt$'):
        with pkio.save_chdir(f.dirname):
            _one_dir()
Beispiel #25
0
 def _check_run_dir(should_exist=0):
     # Assert how many files matching `m` exist under the user dir.
     # `fc` and `m` come from the enclosing test scope; presumably `m` is a
     # file_re for the run-dir files under test — TODO confirm at call site.
     f = pkio.walk_tree(fc.sr_user_dir(), file_re=m)
     pkunit.pkeq(should_exist, len(f), 'incorrect file count')
Beispiel #26
0
def _h5_file_list(run_dir):
    """Return the ``.h5`` result files under ``run_dir/hdf5``."""
    hdf5_dir = run_dir.join('hdf5')
    return pkio.walk_tree(hdf5_dir, r'\.h5$')
Beispiel #27
0
def examples(app):
    """Open every example JSON file for ``app``."""
    json_re = re.escape(JSON_SUFFIX) + '$'
    example_dir = pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app))
    res = []
    for p in pkio.walk_tree(example_dir, json_re):
        res.append(open_json_file(app, str(p)))
    return res
Beispiel #28
0
def _h5_file_list(run_dir):
    """Return the ``.h5`` result files under ``run_dir/hdf5``.

    Args:
        run_dir (py.path): run directory containing an ``hdf5`` subdir

    Returns:
        list: matching files from ``pkio.walk_tree``
    """
    return pkio.walk_tree(
        run_dir.join('hdf5'),
        r'\.h5$',
    )
Beispiel #29
0
def default_command():
    """Bootstrap a local rsconf dev database under ``db.cfg.root_d``.

    Creates srv/db/secret/nginx dirs, renders the ``dev`` resource tree
    through jinja, sets up per-host configs, docker registries, and
    self-signed TLS certs for the dev hosts. No-op if root_d already exists.
    """
    import rsconf.component
    import rsconf.component.rsconf
    import rsconf.pkcli.tls

    root_d = db.cfg.root_d
    if root_d.check():
        return '{}: already exists'.format(root_d)
    srv = pkio.mkdir_parent(root_d.join(db.SRV_SUBDIR))

    def _sym(old, new_base=None):
        # Symlink `old` into the srv dir (dev convenience; see callers).
        old = pkio.py_path(old)
        if not new_base:
            new_base = old.basename
        assert old.check(), \
            '{}: does not exist'.format(old)
        srv.join(new_base).mksymlinkto(old, absolute=False)

    # ssh-keygen -q -N '' -C rsconf -t rsa -b 4096 -f /var/tmp/foo
    # -- don't need this
    db_d = pkio.mkdir_parent(root_d.join(db.DB_SUBDIR))
    secret_d = pkio.mkdir_parent(db_d.join(db.SECRET_SUBDIR))
    nginx_d = pkio.mkdir_parent(root_d.join(NGINX_SUBDIR))
    boot_hdb = pkcollections.Dict(rsconf_db=pkcollections.Dict(
        secret_d=secret_d,
        channel='dev',
    ))
    j2_ctx = pkcollections.Dict(
        all_host='v9.radia.run',
        group=grp.getgrgid(os.getgid())[0],
        host='v4.radia.run',
        master='v3.radia.run',
        port=2916,
        root_d=root_d,
        srv_d=str(srv),
        uid=os.getuid(),
        user=pwd.getpwuid(os.getuid())[0],
        worker5_host='v5.radia.run',
        worker6_host='v6.radia.run',
    )
    # Every '*.radia.run' value in the context is treated as a dev host.
    hosts = [h for h in j2_ctx.values() if str(h).endswith('.radia.run')]
    # bootstrap
    j2_ctx.update(boot_hdb)
    j2_ctx.rsconf_db.http_host = 'http://{}:{}'.format(j2_ctx.master,
                                                       j2_ctx.port)
    j2_ctx.bkp = pkcollections.Dict(primary=j2_ctx.host)
    j2_ctx.passwd_f = rsconf.component.rsconf.passwd_secret_f(j2_ctx)
    for h in hosts:
        _add_host(j2_ctx, srv, h)
    _sym('~/src/radiasoft/download/bin/install.sh', 'index.html')
    _sym(pkresource.filename('rsconf/rsconf.sh'), 'rsconf.sh')
    dev_d = pkio.py_path(pkresource.filename('dev'))
    # Render or copy the dev resource tree into root_d.
    for f in pkio.walk_tree(dev_d):
        # NOTE(review): str(f) is an absolute path, so the startswith('#')
        # branch can never be true — was f.basename.startswith('#') intended
        # (emacs auto-save files)? TODO confirm.
        if str(f).endswith('~') or str(f).startswith('#'):
            continue
        x = str(f.relto(dev_d))
        if not ('local/' in x and x.endswith('.sh.jinja')):
            x = re.sub('.jinja$', '', x)
        dst = root_d.join(x)
        pkio.mkdir_parent_only(dst)
        # Same basename means the .jinja suffix was kept: copy verbatim.
        if f.basename == dst.basename:
            f.copy(dst)
        else:
            pkjinja.render_file(f, j2_ctx, output=dst, strict_undefined=True)
    n = []
    for e in 'rpm', 'proprietary':
        d = pkio.py_path(root_d.dirname).join(e)
        pkio.mkdir_parent(d)
        root_d.join(e).mksymlinkto(d, absolute=False)
        n.append(str(d))
    subprocess.check_call(['bash', str(secret_d.join('setup_dev.sh')), *n], )
    # dev only, really insecure, but makes consistent builds easy
    _sym('~/src/radiasoft')
    _sym('~/src/biviosoftware')
    for h in hosts:
        # needed to be able to talk setup certs for registry so we can
        # pull private images from all hosts. Only used in dev, because
        # private registry doesn't protect against pushes from these hosts.
        if h != j2_ctx.master:
            subprocess.check_call(
                ['rsconf', 'host', 'init_docker_registry', h])
    tls_d = secret_d.join(rsconf.component.TLS_SECRET_SUBDIR)
    tls_d.ensure(dir=True)
    # Self-signed certs for each dev host plus a wildcard for the main host.
    for h in (
            'jupyter.' + j2_ctx.all_host,
            'jupyter.' + j2_ctx.host,
            j2_ctx.all_host,
            j2_ctx.master,
            j2_ctx.worker5_host,
            j2_ctx.worker6_host,
    ):
        rsconf.pkcli.tls.gen_self_signed_crt(
            tls_d.join(h),
            h,
        )
    rsconf.pkcli.tls.gen_self_signed_crt(
        tls_d.join('star.' + j2_ctx.host),
        '*.' + j2_ctx.host,
        j2_ctx.host,
    )
Beispiel #30
0
def _h5_file_list(run_dir, model_name):
    """Return the ``.h5`` output files for the given animation model.

    Args:
        run_dir (py.path): run directory containing the diags tree
        model_name (str): 'currentAnimation' selects the solver output,
            anything else the electric-field output

    Returns:
        list: matching files from ``pkio.walk_tree``
    """
    return pkio.walk_tree(
        run_dir.join('diags/xzsolver/hdf5' if model_name ==
                     'currentAnimation' else 'diags/fields/electric'),
        r'\.h5$',
    )
Beispiel #31
0
def examples(app):
    """Return the parsed example simulation files for ``app``.

    Args:
        app (str): simulation type whose example dir is scanned

    Returns:
        list: result of ``open_json_file`` for each example JSON file
    """
    files = pkio.walk_tree(
        pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app)),
        re.escape(JSON_SUFFIX) + '$',
    )
    return [open_json_file(app, str(f)) for f in files]