Exemplo n.º 1
0
def open_json_file(simulation_type, path=None, sid=None):
    """Load a simulation's JSON data file and fix up old-format data.

    Args:
        simulation_type (str): app name, used to locate the data file
        path (py.path|str): explicit file to read; computed from ``sid``
            when None
        sid (str): simulation id; used to locate the file and to repair
            the stored simulationId

    Returns:
        dict: fixed-up simulation data, or a redirect dict when ``sid``
        names a global (example) simulation instead of a user file

    Raises:
        werkzeug.exceptions.NotFound: file missing and no global
            simulation matches ``sid``
    """
    if not path:
        path = _simulation_data_file(simulation_type, sid)
    if not os.path.isfile(str(path)):
        global_sid = None
        if sid:
            # the user may already have a private copy of the global sim
            user_copy_sid = _find_user_simulation_copy(simulation_type, sid)
            if find_global_simulation(simulation_type, sid):
                global_sid = sid
        if global_sid:
            # tell the client to redirect to the global simulation
            return {
                'redirect': {
                    'simulationId': global_sid,
                    'userCopySimulationId': user_copy_sid,
                },
            }
        raise werkzeug.exceptions.NotFound()
    try:
        with open(str(path)) as f:
            data = json.load(f)
            # ensure the simulationId matches the path
            if sid:
                data['models']['simulation']['simulationId'] = _sid_from_path(path)
            return fixup_old_data(simulation_type, data)
    except:
        # log which file failed before propagating the original error
        pkdp('File: {}', path)
        raise
Exemplo n.º 2
0
def test_purge_users(monkeypatch):
    """Verify admin.purge_users deletes only old, unregistered user dirs."""
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from pykern import pkconfig
    from sirepo import srunit
    srunit.init_user_db()

    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import api_auth
    import datetime

    # nothing is older than a day yet, so nothing should be purged
    res = admin.purge_users(days=1, confirm=False)
    # NOTE(review): the '{}' in this message has no matching arg —
    # presumably pkeq tolerates unfilled placeholders; confirm
    pkeq([], res, '{}: no old users so empty')
    pkdp(simulation_db.user_dir_name('*'))
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    #  a test oauth class.
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [uid])
    # age every file past the one-day purge threshold
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    # now the user is unregistered: purge should select it
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    # confirm=False only reports; nothing is actually removed
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    # confirm=True actually removes the directory
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
Exemplo n.º 3
0
def app_run():
    """Run a simulation synchronously and return its JSON output.

    On failure, returns a JSON response carrying the error text and the
    simulation id instead of the simulation output.
    """
    req_data = _json_input()
    simulation_id = simulation_db.parse_sid(req_data)
    error = _start_simulation(req_data).run_and_read()
    run_dir = simulation_db.simulation_run_dir(req_data)
    if not error:
        out_file = run_dir.join('out{}'.format(simulation_db.JSON_SUFFIX))
        return pkio.read_text(out_file)
    pkdp('error: sid={}, dir={}, out={}', simulation_id, run_dir, error)
    return flask.jsonify({
        'error': _error_text(error),
        'simulationId': simulation_id,
    })
Exemplo n.º 4
0
def _assert_unicode(d, prefix=None):
    """Recursively assert that all keys (and leaf strings) are unicode.

    Args:
        d: tree node; containers expose ``children`` and dict-style access
        prefix (str): dotted path to ``d``, used in assertion messages
    """
    if d.children:
        for k in d:
            v = d[k]
            pkdp('{} {}', k, v)
            # build the dotted path for error reporting
            p = '{}.{}'.format(prefix, k) if prefix else k
            # TODO(robnagler) breaks with PY3
            assert isinstance(k, unicode), \
                '{}: key is not unicode'.format(p)
            _assert_unicode(v, p)
    elif type(d) == str:
        # PY2 only: a byte-str leaf always fails this isinstance check,
        # which is the point — leaves must be unicode
        assert isinstance(d, unicode), \
            '{}: key is not unicode'.format(prefix)
Exemplo n.º 5
0
def test_declarations():
    """Verify a couple of values exist in the srw declarations tree."""
    d = rt_params.declarations('srw')
    pkdp(d['precision']['spectral_flux'].children)
    # fix: message previously said "centimeters" while asserting 'm'
    assert d['undulator']['period_len'].units == 'm', \
        'Undulator period length units should be meters'
    assert d['precision']['spectral_flux']['flux_calculation'].py_type \
        == srw_enums.FluxCalculation, \
        'Flux Calculation type should be srw_enums.Flux'
    # list(...) so indexing works on PY3 dict views as well as PY2 lists
    pkdp(list(d['precision']['spectral_flux'].values())[3])
    l = list(iter(d['precision']['spectral_flux'].values()))
    assert 'Azimuthal Integration Precision' == l[3].label, \
        'Result should be ordered'
    _assert_unicode(d)
Exemplo n.º 6
0
def iterate_simulation_datafiles(simulation_type, op, search=None):
    """Apply ``op`` to every parseable simulation data file.

    Args:
        simulation_type (str): app name whose simulation dir is scanned
        op (callable): called as ``op(results, path, data)`` to accumulate
        search: optional criteria; files not matching are skipped

    Returns:
        list: whatever ``op`` appended to the result list
    """
    results = []
    pattern = str(simulation_dir(simulation_type).join('*', SIMULATION_DATA_FILE))
    for match in glob.glob(pattern):
        datafile = py.path.local(match)
        try:
            data = open_json_file(simulation_type, datafile)
            if not (search and not _search_data(data, search)):
                op(results, datafile, data)
        except ValueError:
            # corrupt/partial file: skip it but keep iterating
            pkdp('unparseable json file: {}', datafile)
    return results
Exemplo n.º 7
0
def app_run_background():
    """Start a background simulation run, unless one is already running.

    Returns:
        JSON with the new run state and start time, or the literal
        string '{}' when a run for this sid is already in progress.
    """
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    #TODO(robnagler) race condition. Need to lock the simulation
    if cfg.job_queue.is_running(sid):
        #TODO(robnagler) return error to user if in different window
        pkdp('ignoring second call to runBackground: {}'.format(sid))
        return '{}'
    # record the run state in the request data before launching
    status = data['models']['simulationStatus']
    status['state'] = 'running'
    status['startTime'] = int(time.time())
    _start_simulation(data, run_async=True)
    return flask.jsonify({
        'state': status['state'],
        'startTime': status['startTime'],
    })
Exemplo n.º 8
0
 def run_and_read(self):
     """Run the simulation thread, enforcing ``self.timeout``.

     Returns:
         str: error text (captured output plus a reason) on timeout or
         nonzero exit, or None on success.
     """
     self.start()
     self.join(self.timeout)
     try:
         # If join() timed out, the process is still alive: kill it.
         # If it already exited, kill() presumably raises OSError(ESRCH)
         # and we fall through to the returncode check — confirm
         self.process.kill()
         pkdp('Timeout: cmd={}', self.cmd)
         # Thread should exit, but make sure
         self.join(2)
         return self.out + '\nError: simulation took too long'
     except OSError as e:
         if e.errno != errno.ESRCH:
             raise
     if self.process.returncode != 0:
         pkdp('Error: cmd={}, returncode={}', self.cmd, self.process.returncode)
         return self.out + '\nError: simulation failed'
     return None
Exemplo n.º 9
0
 def kill(cls, sid):
     """Revoke the celery task for ``sid``, escalating SIGTERM to SIGKILL.

     Presumably a classmethod (decorator not visible here) — confirm.
     """
     from celery.exceptions import TimeoutError
     with cls._lock:
         res = cls._async_result(sid)
         if not res:
             # no task for this sid; nothing to do
             return
         pkdp('Killing: tid={} sid={}', res.task_id, sid)
     try:
         # graceful terminate first, waiting up to 1s for it to stop
         res.revoke(terminate=True, wait=True, timeout=1, signal='SIGTERM')
     except TimeoutError as e:
         # task did not stop in time: escalate to SIGKILL
         res.revoke(terminate=True, signal='SIGKILL')
     with cls._lock:
         try:
             del cls._task[sid]
             pkdp('Deleted: sid={}', sid)
         except KeyError:
             # already removed (e.g. by a completion handler)
             pass
Exemplo n.º 10
0
def test_pkdp(capsys):
    """Basic output and return with `pkdp`"""
    from pykern.pkdebug import pkdp, init
    init()
    # pkdp returns its argument so it can be inserted into expressions
    assert 333 == pkdp(333)
    out, err = capsys.readouterr()
    # fix: message had typo "chould" and a stray trailing comma
    assert str(333) in err, \
        'When pkdp is called, its arg should be converted to str'
Exemplo n.º 11
0
def test_init(capsys):
    """init(output=file) routes pkdp to the file; want_pid_time adds a prefix."""
    from pykern import pkunit
    f = pkunit.empty_work_dir().join('f1')
    from pykern.pkdebug import pkdp, init
    init(output=f)
    pkdp('init1')
    out, err = capsys.readouterr()
    assert '' == err, \
        'When output is a file name, nothing goes to err'
    from pykern import pkio
    assert 'init1\n' in pkio.read_text(f), \
        'File output should contain msg'
    # re-init to stderr output with a pid/time prefix on each line
    init(output=None, want_pid_time=True)
    pkdp('init2')
    out, err = capsys.readouterr()
    # prefix format: "Mmm dd hh:mm:ss <num> <num> "
    assert re.search(r'\w{3} .\d \d\d:\d\d:\d\d +\d+ +\d+ ', err), \
        'When output has time, matches regex'
Exemplo n.º 12
0
def import_python(code, tmp_dir, lib_dir, user_filename=None):
    """Converts script_text into json and stores as new simulation.

    Avoids too much data back to the user in the event of an error.
    This could be a potential security issue, because the script
    could be used to probe the system.

    Args:
        code (str): Python code that runs SRW
        tmp_dir (str): directory in which to write and parse the script
        lib_dir (str): SRW library dir passed through to the parser
        user_filename (str): uploaded file name for log

    Returns:
        error: string containing error or None
        dict: simulation data
    """
    error = 'Import failed: error unknown'
    script = None
    try:
        with pkio.save_chdir(tmp_dir):
            # This string won't show up anywhere
            script = pkio.write_text('in.py', code)
            o = SRWParser(
                script,
                lib_dir=py.path.local(lib_dir),
                user_filename=user_filename,
            )
            return None, o.data
    except Exception as e:
        # locate the user's failing line, if the script got written
        lineno = _find_line_in_trace(script) if script else None
        # Avoid
        pkdp(
            'Error: {}; exception={}; script={}; filename={}; stack:\n{}',
            error,
            e,
            script,
            user_filename,
            traceback.format_exc(),
        )
        # truncate exception text so little system detail leaks to the user
        error = 'Error on line {}: {}'.format(lineno or '?', str(e)[:50])
    return error, None
Exemplo n.º 13
0
 def kill(cls, sid):
     """Kill the local process for ``sid``, escalating SIGTERM to SIGKILL.

     Presumably a classmethod (decorator not visible here) — confirm.
     """
     self = None
     with cls._lock:
         try:
             self = cls._process[sid]
         except KeyError:
             # no process registered for this sid
             return
         #TODO(robnagler) will this happen?
         if self.in_kill:
             # another caller is already killing this process
             return
         self.in_kill = True
     pkdp('Killing: pid={} sid={}', self.pid, self.sid)
     sig = signal.SIGTERM
     for i in range(3):
         try:
             os.kill(self.pid, sig)
             time.sleep(1)
             # non-blocking reap; pid == self.pid means the child exited
             pid, status = os.waitpid(self.pid, os.WNOHANG)
             if pid == self.pid:
                 pkdp('waitpid: pid={} status={}', pid, status)
                 break
             # still alive after SIGTERM: escalate on the next pass
             sig = signal.SIGKILL
         except OSError:
             # Already reaped(?)
             break
     with cls._lock:
         self.in_kill = False
         try:
             del self._process[self.sid]
             pkdp('Deleted: sid={}', self.sid)
         except KeyError:
             pass
Exemplo n.º 14
0
def extract_field_report(field, coordinate, mode, dfile, iteration):
    """Extract one field mode from an HDF5 data file as a plot-ready dict.

    Args:
        field (str): field name ('rho' ignores ``coordinate``)
        coordinate (str): field component
        mode: azimuthal mode index into the dataset
        dfile: open HDF5 file containing 'data/<iteration>/fields'
        iteration (int): dump iteration to read

    Returns:
        dict: ranges, labels, title, and the 2D z_matrix for plotting
    """
    pkdp([field, coordinate, mode, iteration])
    group = dfile['data/{}/fields'.format(iteration)]
    if field == 'rho':
        dset = group['rho']
        coordinate = ''
    else:
        dset = group['{}/{}'.format(field, coordinate)]
    matrix = np.flipud(np.array(dset[mode, :, :]).T)
    n_r, n_z = matrix.shape
    dr = group[field].attrs['gridSpacing'][0]
    dz = group[field].attrs['gridSpacing'][1]
    zmin = group[field].attrs['gridGlobalOffset'][1]
    extent = np.array([
        zmin - 0.5 * dz,
        zmin + 0.5 * dz + dz * n_z,
        0.,
        (n_r + 1) * dr,
    ])
    return {
        'x_range': [extent[0], extent[1], len(matrix[0])],
        'y_range': [extent[2], extent[3], len(matrix)],
        'x_label': 'z [m]',
        'y_label': 'r [m]',
        'title': "{} {} in the mode {} at {}".format(
            field, coordinate, _MODE_TEXT[str(mode)], _iteration_title(dfile, iteration)),
        'z_matrix': np.flipud(matrix).tolist(),
    }
Exemplo n.º 15
0
def _run_warp():
    """Run the warp laser-preview simulation and write out.json.

    Reads the report parameters from in.json, executes
    warp_parameters.py, steps the simulation until the moving window
    reaches z=0 (w3d.zmmin + top.zgrid >= 0), then extracts the
    requested field report from the last HDF5 dump.
    """
    with open('in.json') as f:
        data = json.load(f)

    field = data['models']['laserPreviewReport']['field']
    coordinate = data['models']['laserPreviewReport']['coordinate']
    mode = int(data['models']['laserPreviewReport']['mode'])
    # NOTE(review): exec with locals() as both globals and locals is
    # fragile; the script is expected to define step/top/w3d/h5py used
    # below — confirm
    exec(pkio.read_text('warp_parameters.py'), locals(), locals())
    iteration = 0

    doit = True
    while(doit):
        # advance 50 steps per pass until the window crosses z=0
        step(50)
        iteration += 50
        pkdp(top.zgrid)
        pkdp(w3d.zmmin)
        doit = ( w3d.zmmin + top.zgrid < 0 )

    # dump files are named hdf5/data<iteration zero-padded to 8>.h5
    dfile = h5py.File('hdf5/data' + str(iteration).zfill(8) + '.h5', "r")
    res = extract_field_report(field, coordinate, mode, dfile, iteration)

    with open ('out.json', 'w') as f:
        json.dump(res, f)
Exemplo n.º 16
0
def test_1():
    """Simulate an SRW multi-particle run, log key metrics, and plot.

    fix: the two plt.scatter lines were indented with tabs mixed with
    spaces, a TabError under Python 3; now uniformly space-indented.
    """
    p = srw_multi_particle.simulate(_params('sample'))
    e_p, I_rad = _results(
        [p.stkF.mesh.eStart, p.stkF.mesh.eFin, p.stkF.mesh.ne],
        p.stkF.arS,
    )
    maxI = max(I_rad)
    pkdp('Spectral Amplitude: {:g} ph/s/mrad2', maxI)
    maxima_s = _maxima(I_rad, 3)
    pkdp('maxima: {}', maxima_s)
    z_distance, x_trajectory = _results(
        p.ctMesh,
        p.partTraj.arX,
    )
    minX = min(x_trajectory)
    maxX = max(x_trajectory)
    minZ = min(z_distance)
    maxZ = max(z_distance)
    pkdp('Length of ID: {:g} m', maxZ - minZ)
    pkdp('Oscillation Amplitude: {:g} mm', (maxX - minX)/2)
    L_trajectory = _path_length(z_distance, x_trajectory)
    pkdp('Length of Trajectory: {:g} m', L_trajectory)

    # spectrum with intensity maxima highlighted
    plt.figure()
    plt.plot(e_p, I_rad)
    for i in maxima_s:
        plt.scatter(e_p[i], I_rad[i], color='red')
    plt.grid()
    plt.show()
    plt.clf()
    # trajectory with oscillation maxima highlighted
    plt.plot(z_distance, x_trajectory, '.b', linestyle='-')
    maxima_t = _maxima(x_trajectory, 10)
    for i in maxima_t:
        plt.scatter(z_distance[i], x_trajectory[i], color='red')
    plt.grid()
    plt.show()
    plt.clf()
Exemplo n.º 17
0
def _thumb(image, force):
    """Generate 50px- and 200px-high JPEG thumbnails for ``image``.

    Skips sizes whose thumbnail already exists unless ``force`` is set.

    Returns:
        str: path of the larger (200px) thumbnail
    """
    for height, quality in (('50', '25'), ('200', '50')):
        # thumbnail lives under <height>/ with the suffix replaced by jpg
        target = re.sub(r'\w+$', 'jpg', os.path.join(height, image))
        if not force and os.path.exists(target):
            continue
        parent = pkio.mkdir_parent(py.path.local(target).dirname)
        try:
            subprocess.check_call([
                'convert',
                '-thumbnail',
                'x' + height,
                '-quality',
                quality + '%',
                '-background',
                'white',
                '-alpha',
                'remove',
                image + '[0]',
                target,
            ])
        except:
            # report which directory was being written, then propagate
            pkdp('dir={}', parent)
            raise
    return target
Exemplo n.º 18
0
def test_ipython():
    """pkdp writes through IPython's display hook when running under IPython."""
    import pykern.pkdebug
    from pykern.pkdebug import pkdp
    pykern.pkdebug.init(output=None)
    # Overwrite the _ipython_write method. This doesn't test how ipython is
    # running. We'll do that separately
    save = []
    def _write(msg):
        save.append(msg)
    try:
        pykern.pkdebug._ipython_write = _write
        pkdp('abcdefgh')
        assert 'abcdefgh' in save[0], \
            'When _ipython_write is set, should be called if no output'
    finally:
        pykern.pkdebug._ipython_write = None
    import subprocess
    try:
        p = subprocess.Popen(
            ['ipython',  '--colors', 'NoColor', '-c','from pykern.pkdebug import pkdp; pkdp("abcdef")'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # Not a brilliant test, but does demonstrate that write_err works
        assert '<module> abcdef' in p.stderr.read(), \
            'When in IPython, pkdp() should output to stdout'
        # We make this rigid, because we want to know when IPython interpreter changes
        o = p.stdout.read()
        assert re.search("Out\\[1\\]: \n?'abcdef'", o), \
            'IPython pkdp() is evaluated and written to stdout {}'.format(o)

    except OSError as e:
        # If we don't have IPython, then ignore error
        import errno
        if e.errno != errno.ENOENT:
            # fix: was "reraise", an undefined name (NameError)
            raise
Exemplo n.º 19
0
 def _create_daemon(self):
     """Detach a process from the controlling terminal and run it in the
     background as a daemon.

     Returns:
         int: child pid (parent side only; the child execs self.cmd and
         never returns)
     """
     try:
         pid = os.fork()
     except OSError as e:
         pkdp('fork OSError: {} ({})', e.strerror, e.errno)
         # fix: was "reraise" (undefined name); re-raise the fork error
         raise
     if pid != 0:
         # parent: child launched
         pkdp('Started: pid={} sid={} cmd={}', pid, self.sid, self.cmd)
         return pid
     try:
         os.chdir(str(self.run_dir))
         # new session: detach from the controlling terminal
         os.setsid()
         import resource
         maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
         if (maxfd == resource.RLIM_INFINITY):
             maxfd = 1024
         # close every inherited file descriptor
         for fd in range(0, maxfd):
             try:
                 os.close(fd)
             except OSError:
                 pass
         # rebind fd 0/1/2 all to background.log
         sys.stdin = open('background.log', 'a+')
         assert sys.stdin.fileno() == 0
         os.dup2(0, 1)
         sys.stdout = os.fdopen(1, 'a+')
         os.dup2(0, 2)
         sys.stderr = os.fdopen(2, 'a+')
         pkdp('Starting: cmd={}', self.cmd)
         sys.stderr.flush()
         try:
             os.execvp(self.cmd[0], self.cmd)
         except BaseException as e:
             # fix: was a finally block that logged via "e", which is
             # unbound on the execvp error path (NameError)
             pkdp('execvp error: {}', e)
             sys.exit(1)
     except BaseException as e:
         err = open(str(self.run_dir.join('background.log')), 'a')
         err.write('Error starting daemon: {}\n'.format(e))
         err.close()
         # fix: was "reraise" (undefined name)
         raise
Exemplo n.º 20
0
 def sigchld_handler(cls, signum=None, frame=None):
     """SIGCHLD handler: reap one exited child and drop its process entry.

     Presumably a classmethod installed via signal.signal (decorator and
     caller not visible here) — confirm.
     """
     try:
         # non-blocking reap of whichever child exited
         pid, status = os.waitpid(-1, os.WNOHANG)
         pkdp('waitpid: pid={} status={}', pid, status)
         with cls._lock:
             for self in cls._process.values():
                 if self.pid == pid:
                     # deleting while iterating is tolerable here because
                     # we return immediately after the mutation
                     del self._process[self.sid]
                     pkdp('Deleted: sid={}', self.sid)
                     return
     except OSError as e:
         # ECHILD (no children left) is expected; log anything else
         if e.errno != errno.ECHILD:
             pkdp('waitpid OSError: {} ({})', e.strerror, e.errno)
Exemplo n.º 21
0
 def send(self, msg):
     """Log ``msg`` via pkdp; presumably a debug/stub transport — confirm."""
     pkdp(msg)
Exemplo n.º 22
0
def _one_dir():
    """Validate and repair the index.txt in the current directory.

    Cross-checks each index.txt line against the image files actually
    present: comments out unparseable or orphaned lines, substitutes
    bare base names with their real image file, adds "?" for missing
    text, and appends any images not yet listed. Rewrites index.txt
    (keeping a backup in index.txt~) only when something changed.
    """
    d = os.getcwd()
    pkdp('{}', d)

    def err(msg):
        # all diagnostics are prefixed with the directory being processed
        pkdp('{}: {}'.format(d, msg))

    try:
        with open('index.txt') as f:
            lines = list(f)
    except:
        # no readable index.txt here; nothing to do
        return
    images = set()
    bases = {}
    seen = set()
    for x in glob.glob('*.*'):
        m = rnpix.common.IMAGE_SUFFIX.search(x)
        if m:
            images.add(x)
            # There may be multiple but we are just using for anything
            # to match image bases only
            bases[m.group(1)] = x
    new_lines = []
    for l in lines:
        if re.search(r'^#\S+\.avi', l):
            # Fix previous avi bug
            l = l[1:]
        elif re.search(r'^#\d+_\d+\.jpg', l):
            l = l[1:].replace('_', '-', 1)
        elif l.startswith('#'):
            new_lines.append(l)
            continue
        m = _LINE_RE.search(l)
        if not m:
            if re.search(r'\S', l):
                err('{}: strange line, commenting'.format(l))
                new_lines.append('#' + l)
                # fix: previously fell through and also logged the
                # misleading "blank line, skipping" for strange lines
                continue
            err('{}: blank line, skipping'.format(l))
            continue
        i, t = m.group(1, 2)
        m = rnpix.common.IMAGE_SUFFIX.search(i)
        if not m:
            if i in bases:
                err('{}: substituting for {}'.format(i, bases[i]))
                i = bases[i]
                l = i + ' ' + t + '\n'
            else:
                err('{}: no such base or image, commenting'.format(i))
                new_lines.append('#' + l)
                continue
        if i in images:
            images.remove(i)
            seen.add(i)
        else:
            if i in seen:
                if t == '?':
                    err('{}: already seen no text, skipping'.format(i))
                    continue
                err('{}: already seen with text'.format(i))
            err('{}: no such image: text={}'.format(i, t))
            new_lines.append('#' + l)
            continue
        if not len(t):
            err('{}: no text, adding "?"'.format(l))
            l = i + ' ?\n'
        new_lines.append(l)

    if images:
        # files on disk that index.txt never mentioned
        err('{}: extra images, append'.format(images))
        for i in images:
            new_lines.append(i + ' ?\n')
    if new_lines != lines:
        pkdp('writing: index.txt')
        # keep a backup and preserve the original file mode
        os.rename('index.txt', 'index.txt~')
        with open('index.txt', 'w') as f:
            f.write(''.join(new_lines))
        shutil.copymode('index.txt~', 'index.txt')
Exemplo n.º 23
0
 def err(msg):
     """Log ``msg`` prefixed by the directory ``d`` from the enclosing scope."""
     pkdp('{}: {}'.format(d, msg))
Exemplo n.º 24
0
def _one_dir():
    """Validate and repair the index.txt in the current directory.

    Cross-checks each index.txt line against the image files actually
    present: comments out unparseable or orphaned lines, substitutes
    bare base names with their real image file, adds "?" for missing
    text, and appends any images not yet listed. Rewrites index.txt
    (keeping a backup in index.txt~) only when something changed.
    """
    d = os.getcwd()
    pkdp('{}', d)

    def err(msg):
        # all diagnostics are prefixed with the directory being processed
        pkdp('{}: {}'.format(d, msg))

    try:
        with open('index.txt') as f:
            lines = list(f)
    except:
        # no readable index.txt here; nothing to do
        return
    images = set()
    bases = {}
    seen = set()
    for x in glob.glob('*.*'):
        m = _IMAGE_RE.search(x)
        if m:
            images.add(x)
            # There may be multiple but we are just using for anything
            # to match image bases only
            bases[m.group(1)] = x
    new_lines = []
    for l in lines:
        if re.search(r'^#\S+\.avi', l):
            # Fix previous avi bug
            l = l[1:]
        elif re.search(r'^#\d+_\d+\.jpg', l):
            l = l[1:].replace('_', '-', 1)
        elif l.startswith('#'):
            new_lines.append(l)
            continue
        m = _LINE_RE.search(l)
        if not m:
            if re.search(r'\S', l):
                err('{}: strange line, commenting'.format(l))
                new_lines.append('#' + l)
                # fix: previously fell through and also logged the
                # misleading "blank line, skipping" for strange lines
                continue
            err('{}: blank line, skipping'.format(l))
            continue
        i, t = m.group(1, 2)
        m = _IMAGE_RE.search(i)
        if not m:
            if i in bases:
                err('{}: substituting for {}'.format(i, bases[i]))
                i = bases[i]
                l = i + ' ' + t + '\n'
            else:
                err('{}: no such base or image, commenting'.format(i))
                new_lines.append('#' + l)
                continue
        if i in images:
            images.remove(i)
            seen.add(i)
        else:
            if i in seen:
                if t == '?':
                    err('{}: already seen no text, skipping'.format(i))
                    continue
                err('{}: already seen with text'.format(i))
            err('{}: no such image: text={}'.format(i, t))
            new_lines.append('#' + l)
            continue
        if not len(t):
            err('{}: no text, adding "?"'.format(l))
            l = i + ' ?\n'
        new_lines.append(l)

    if images:
        # files on disk that index.txt never mentioned
        err('{}: extra images, append'.format(images))
        for i in images:
            new_lines.append(i + ' ?\n')
    if new_lines != lines:
        pkdp('writing: index.txt')
        # keep a backup and preserve the original file mode
        os.rename('index.txt', 'index.txt~')
        with open('index.txt', 'w') as f:
            f.write(''.join(new_lines))
        shutil.copymode('index.txt~', 'index.txt')
Exemplo n.º 25
0
 def err(msg):
     """Log ``msg`` prefixed by the directory ``d`` from the enclosing scope."""
     pkdp('{}: {}'.format(d, msg))