Example #1
def test(name, input, want_obj, want_bytes, **kw):
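    # Save with json_save, encode with json_encode (both as str and as bytes),
    # and reload with json_load; all results must agree with each other and
    # with the expected object and bytes.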
    json_save(input, name, **kw)
    with open(name, "rb") as fh:
        got_bytes_raw = fh.read()
        assert got_bytes_raw[-1:] == b"\n", name + " didn't even end with a newline"
        got_bytes_raw = got_bytes_raw[:-1]
    as_str = json_encode(input, as_str=True, **kw)
    as_bytes = json_encode(input, as_str=False, **kw)
    assert isinstance(as_str, str) and isinstance(as_bytes, bytes), \
        "json_encode returns the wrong types: %s %s" % (type(as_str), type(as_bytes))
    assert as_bytes == got_bytes_raw, "json_save doesn't save the same thing json_encode returns for " + name
    if PY3:
        as_str = as_str.encode("utf-8")
    assert as_bytes == as_str, "json_encode doesn't return the same data for as_str=True and False"
    got_obj = json_load(name)
    assert want_obj == got_obj, "%s roundtrips wrong (wanted %r, got %r)" % (
        name, want_obj, got_obj)
    with open(name, "rb") as fh:
        got_bytes_fuzzy = b"".join(line.strip() for line in fh)
    assert want_bytes == got_bytes_fuzzy, "%s wrong on disk (but decoded right)" % (
        name, )
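A minimal round-trip sketch (not from the source) of what the test above exercises, written as an accelerator method's synthesis(); the filename 'example.json' is hypothetical, and json_load is assumed to live alongside json_save in accelerator.extras.

from accelerator.extras import json_save, json_load

def synthesis():
    obj = {'version': 1, 'values': [1, 2, 3]}
    # save, then reload and compare; the test above shows json_save ends the file with a newline
    json_save(obj, 'example.json', sort_keys=True)
    assert json_load('example.json') == obj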
Example #2
    def json_save(self,
                  obj,
                  filename='result.json',
                  sliceno=None,
                  sort_keys=True,
                  temp=None):
        from accelerator.extras import json_save
        json_save(obj, filename, sliceno, sort_keys=sort_keys, temp=temp)
Example #3
def save_setup(jobid, data):
    data = dict(data)
    data['version'] = 3
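    # flatten the selected method's params into the top level and keep only that method's _typing entry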
    data.update(data['params'][data['method']])
    del data['params']
    if '_typing' in data:
        data['_typing'] = data['_typing'][data['method']]
    filename = Job(jobid).filename('setup.json')
    json_save(data, filename, _encoder=encode_setup)
Example #4
def analysis(sliceno):
    data = {sliceno}
    if options.inner:
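        # a sliced file loads per slice; a sliced load of an unsliced file must raise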
        if options.file.sliced:
            value = options.file.load(sliceno)
            assert value == data
        else:
            try:
                options.file.load(sliceno)
                raise Exception("Allowed sliced load of unsliced file")
            except AssertionError:
                pass
        blob.save({'inner': sliceno}, 'inner.pickle', sliceno, temp=False)
        json_save({'inner': sliceno}, 'inner.json', sliceno)
    else:
        blob.save(data, 'data', sliceno, temp=False)
Example #5
    def run_job(self,
                jobid,
                subjob_cookie=None,
                parent_pid=0,
                concurrency=None):
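        # launch the job, drop its temp files (unless debugging), then record timing in post.json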
        W = self.workspaces[Job(jobid).workdir]
        #
        active_workdirs = {
            name: ws.path
            for name, ws in self.workspaces.items()
        }
        slices = self.workspaces[self.target_workdir].slices

        t0 = time.time()
        setup = update_setup(jobid, starttime=t0)
        prof = setup.get('exectime', DotDict())
        new_prof, files, subjobs = dispatch.launch(W.path, setup, self.config,
                                                   self.Methods,
                                                   active_workdirs, slices,
                                                   concurrency, self.debug,
                                                   self.server_url,
                                                   subjob_cookie, parent_pid)
        prefix = join(W.path, jobid) + '/'
        if not self.debug:
            for filename, temp in list(files.items()):
                if temp:
                    unlink(join(prefix, filename))
                    del files[filename]
        prof.update(new_prof)
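        # reset total first so an earlier total value is not summed into the new one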
        prof.total = 0
        prof.total = sum(v for v in prof.values()
                         if isinstance(v, (float, int)))
        if concurrency:
            prof.concurrency = concurrency
        data = dict(
            starttime=t0,
            endtime=time.time(),
            exectime=prof,
        )
        update_setup(jobid, **data)
        data['files'] = sorted(
            fn[len(prefix):] if fn.startswith(prefix) else fn for fn in files)
        data['subjobs'] = subjobs
        data['version'] = 1
        json_save(data, jobid.filename('post.json'))
Example #6
def synthesis(params, job):
    data = {'foo'}
    if options.inner:
        if options.file.sliced:
            try:
                options.file.load()
                raise Exception("Allowed unsliced load of sliced file")
            except AssertionError:
                pass
        else:
            value = options.file.load()
            assert value == data
        blob.save({'inner': None}, 'inner.pickle')
        json_save({'inner': None}, 'inner.json')
    else:
        blob.save(data, 'data')
        # use different ways to construct the jwf so both get tested.
        verify(params, JobWithFile(params.jobid, 'data'))
        verify(params, job.withfile('data', True))
Example #7
    def run_job(self, jobid, subjob_cookie=None, parent_pid=0):
        W = self.workspaces[Job(jobid).workdir]
        #
        active_workdirs = {
            name: ws.path
            for name, ws in self.workspaces.items()
        }
        slices = self.workspaces[self.target_workdir].slices

        t0 = time.time()
        setup = update_setup(jobid, starttime=t0)
        prof = setup.get('profile', DotDict())
        new_prof, files, subjobs = dispatch.launch(W.path, setup, self.config,
                                                   self.Methods,
                                                   active_workdirs, slices,
                                                   self.debug, self.daemon_url,
                                                   subjob_cookie, parent_pid)
        if self.debug:
            delete_from = Temp.TEMP
        else:
            delete_from = Temp.DEBUG
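        # delete files at or above the chosen temp level; debug runs keep Temp.DEBUG files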
        for filename, temp in list(files.items()):
            if temp >= delete_from:
                unlink(join(W.path, jobid, filename))
                del files[filename]
        prof.update(new_prof)
        prof.total = 0
        prof.total = sum(v for v in prof.values()
                         if isinstance(v, (float, int)))
        data = dict(
            starttime=t0,
            endtime=time.time(),
            profile=prof,
        )
        update_setup(jobid, **data)
        data['files'] = files
        data['subjobs'] = subjobs
        json_save(data, jobid.filename('post.json'))
Example #8
def save_setup(jobid, data):
    data = dict(data)
    data['version'] = 3
    filename = Job(jobid).filename('setup.json')
    json_save(data, filename, _encoder=encode_setup)