def test_run_large(self):

        self.explore(self.traj, trials=30)
        self.add_array_params(self.traj)

        self.traj.f_add_parameter('TEST', 'test_run')

        self.env.f_run(write_into_shared_storage)

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)


        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name, as_new=False)

        self.check_insertions(self.traj)
        self.check_insertions(newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 10.0, 'Size is %sMB > 10MB' % str(size_in_mb))

        for res in self.traj.results.f_iter_leaves():
            if isinstance(res, SharedResult):
                for key in res.f_to_dict():
                    item = res[key]
                    if isinstance(item, SharedData):
                        make_ordinary_result(res, key, trajectory=self.traj)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name, as_new=False)
        self.compare_trajectories(self.traj, newtraj)
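
A note on the pattern: this test (like most of the examples below) stores a trajectory to HDF5, reloads it, compares the two copies, and logs the file size via the root logger. The following minimal sketch is not part of the test suite; it uses only the public pypet API (Trajectory, load_trajectory) that the examples already rely on and substitutes logging.getLogger() for the tests' get_root_logger() helper. The file name and parameter are illustrative.

import os
import logging

from pypet import Trajectory, load_trajectory

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

filename = 'minimal_example.hdf5'  # illustrative path

# Create, parameterize, and store a small trajectory.
traj = Trajectory(name='MinimalExample', filename=filename, add_time=False)
traj.f_add_parameter('x', 42, comment='answer')
traj.f_store()

# Reload everything from disk and check the parameter survived the round trip.
newtraj = load_trajectory(name='MinimalExample', filename=filename, load_all=2)
assert newtraj.x == traj.x

# Log the file size in MB, mirroring the assertions in the tests above.
size_in_mb = os.path.getsize(filename) / 1000000.
logger.info('Size is %sMB' % str(size_in_mb))
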
Example #2
    def test_loading_run(self):

        self.traj.f_add_parameter('y', 12)
        self.traj.f_explore({'y': [12, 3, 3, 4]})

        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.f_add_derived_parameter('x', 42)
        self.traj.f_store()
        self.traj.dpar.f_remove_child('x')

        self.env.f_run(load_from_shared_storage)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 2.0,
                        'Size is %sMB > 2MB' % str(size_in_mb))

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.compare_trajectories(self.traj, newtraj)
Example #3
    def test_import_star(self):
        for class_name in pypet.__all__:
            if class_name == 'test':
                continue
            logstr = 'Evaluating %s: %s' % (class_name, repr(
                eval(class_name)))
            get_root_logger().info(logstr)
    def test_expand_after_reload(self):

        self.traj.f_add_parameter('TEST', 'test_expand_after_reload')
        ###Explore
        self.explore(self.traj)

        self.make_run()

        traj_name = self.traj.v_name


        self.env = Environment(trajectory=self.traj,
                          log_stdout=False,
                          log_config=get_log_config())

        self.traj = self.env.v_trajectory

        self.traj.f_load(name=traj_name)
        self.traj.res.f_remove()
        self.traj.dpar.f_remove()

        self.expand()

        get_root_logger().info('\n $$$$$$$$$$$$ Second Run $$$$$$$$$$ \n')
        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)
Example #5
    def test_run_large(self):

        self.explore(self.traj, trials=15)
        self.add_array_params(self.traj)

        self.traj.f_add_parameter('TEST', 'test_run')

        self.env.f_run(write_into_shared_storage)

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)

        self.check_insertions(self.traj)
        self.check_insertions(newtraj)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 10.0,
                        'Size is %sMB > 10MB' % str(size_in_mb))

        for res in self.traj.results.f_iter_leaves():
            if isinstance(res, SharedResult):
                for key in res.f_to_dict():
                    item = res[key]
                    if isinstance(item, SharedData):
                        make_ordinary_result(res, key, trajectory=self.traj)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.compare_trajectories(self.traj, newtraj)
Example #6
    def test_version_mismatch(self):
        traj = Trajectory(name='TestVERSION', filename=make_temp_dir('testversionmismatch.hdf5'),
                          add_time=True)

        traj.f_add_parameter('group1.test',42)

        traj.f_add_result('testres', 42)

        traj.group1.f_set_annotations(Test=44)

        traj._version='0.1a.1'

        traj.f_store()

        traj2 = Trajectory(name=traj.v_name, add_time=False,
                           filename=make_temp_dir('testversionmismatch.hdf5'))

        with self.assertRaises(pex.VersionMismatchError):
            traj2.f_load(load_parameters=2, load_results=2)

        traj2.f_load(load_parameters=2, load_results=2, force=True)

        self.compare_trajectories(traj,traj2)

        get_root_logger().info('Mismatch testing done!')
Example #7
    def test_version_mismatch(self):
        traj = Trajectory(name='TestVERSION', filename=make_temp_dir('testversionmismatch.hdf5'),
                          add_time=True)

        traj.f_add_parameter('group1.test',42)

        traj.f_add_result('testres', 42)

        traj.group1.f_set_annotations(Test=44)

        traj._version='0.1a.1'

        traj.f_store()

        traj2 = Trajectory(name=traj.v_name, add_time=False,
                           filename=make_temp_dir('testversionmismatch.hdf5'))

        with self.assertRaises(pex.VersionMismatchError):
            traj2.f_load(load_parameters=2, load_results=2)

        traj2.f_load(load_parameters=2, load_results=2, force=True)

        self.compare_trajectories(traj,traj2)

        get_root_logger().info('Mismatch testing done!')
    def test_expand_after_reload(self):

        self.traj.f_add_parameter('TEST', 'test_expand_after_reload')
        ###Explore
        self.explore(self.traj)

        self.make_run()

        traj_name = self.traj.v_name


        self.env = Environment(trajectory=self.traj,
                          log_stdout=False,
                          log_config=get_log_config())

        self.traj = self.env.v_trajectory

        self.traj.f_load(name=traj_name)
        self.traj.res.f_remove()
        self.traj.dpar.f_remove()

        self.expand()

        get_root_logger().info('\n $$$$$$$$$$$$ Second Run $$$$$$$$$$ \n')
        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)
Example #9
def postproc(traj, results, idx):
    get_root_logger().info(idx)

    traj.f_load_skeleton()

    if len(results) <= 4 and len(traj) == 4:
        return {'x':[1,2], 'y':[1,2]}
    if len(results) <= 6 and len(traj) == 6:
        traj.f_expand({'x':[2,3], 'y':[0,1]})
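
A post-processing callback like this is registered with the environment before the runs start; if it returns a dictionary of new exploration points (or calls traj.f_expand itself), further runs are scheduled. The wiring below is a hedged sketch, not taken from these tests: it assumes the f_-prefixed Environment API used throughout (f_add_postprocessing forwarding extra arguments, f_run, f_disable_logging), and the run function, file name, and values are made up for illustration.

from pypet import Environment, cartesian_product

def multiply(traj):
    # Illustrative run function.
    traj.f_add_result('z', traj.x * traj.y)

env = Environment(trajectory='PostprocDemo',
                  filename='postproc_demo.hdf5',  # illustrative path
                  log_stdout=False)
traj = env.v_trajectory
traj.f_add_parameter('x', 0)
traj.f_add_parameter('y', 0)
traj.f_explore(cartesian_product({'x': [1, 2], 'y': [1, 2]}))

# Assumption: extra arguments passed here reach the callback, so it is invoked
# as postproc(traj, results, 42) once the initial runs have finished.
env.f_add_postprocessing(postproc, 42)
env.f_run(multiply)
env.f_disable_logging()
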
Example #10
    def test_compacting(self):
        filename = make_temp_dir('hdf5compacting.hdf5')
        traj = Trajectory(name=make_trajectory_name(self), filename=filename)
        trajname = traj.v_name
        traj.v_storage_service.complevel = 7

        first_row = {'ha': compat.tobytes('hi'), 'haha': np.zeros((3, 3))}

        traj.f_store(only_init=True)

        traj.f_add_result('My.Tree.Will.Be.Deleted', 42)
        traj.f_add_result('Mine.Too.HomeBoy', 42, comment='Don`t cry for me!')

        res = traj.f_add_result(SharedResult, 'myres')

        res['myres'] = SharedTable()

        res['myres'].create_shared_data(first_row=first_row)

        with StorageContextManager(traj):
            traj.myres
            for irun in range(10000):
                row = traj.myres.row
                for key in first_row:
                    row[key] = first_row[key]
                row.append()
        traj.f_store()
        del traj
        traj = load_trajectory(name=trajname, filename=filename, load_all=2)
        with StorageContextManager(traj) as cm:
            tb = traj.myres.get_data_node()
            ptcompat.remove_rows(tb, 1000, 10000)

            cm.flush_store()
            self.assertTrue(traj.myres.nrows == 1001)

        traj.f_delete_item(traj.My, recursive=True)
        traj.f_delete_item(traj.Mine, recursive=True)

        size = os.path.getsize(filename)
        get_root_logger().info('Filesize is %s' % str(size))
        name_wo_ext, ext = os.path.splitext(filename)
        backup_file_name = name_wo_ext + '_backup' + ext
        code = compact_hdf5_file(filename, keep_backup=True)
        if code != 0:
            raise RuntimeError('ptrepack fail')
        backup_size = os.path.getsize(backup_file_name)
        self.assertTrue(backup_size == size)
        new_size = os.path.getsize(filename)
        get_root_logger().info('New filesize is %s' % str(new_size))
        self.assertTrue(new_size < size,
                        "%s > %s" % (str(new_size), str(size)))
Example #11
def postproc_with_iter_args(traj, results, idx):
    get_root_logger().info(idx)

    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()

    if len(results) <= 4 and len(traj) == 4:
        assert idx == 42
        return {'x':[1,2], 'y':[1,2]}, ([5,6],), {'w':[7,8]}, (43,)
    if len(results) <= 6 and len(traj) == 6:
        assert idx == 43
        traj.f_expand({'x':[2,3], 'y':[0,1]})
        return {}, ([7,8],), {'w':[9,10]}
Example #12
def postproc_with_iter_args(traj, results, idx):
    get_root_logger().info(idx)

    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()

    if len(results) <= 4 and len(traj) == 4:
        assert idx == 42
        return {'x': [1, 2], 'y': [1, 2]}, ([5, 6], ), {'w': [7, 8]}, (43, )
    if len(results) <= 6 and len(traj) == 6:
        assert idx == 43
        traj.f_expand({'x': [2, 3], 'y': [0, 1]})
        return {}, ([7, 8], ), {'w': [9, 10]}
Example #13
    def test_compacting(self):
        filename = make_temp_dir("hdf5compacting.hdf5")
        traj = Trajectory(name=make_trajectory_name(self), filename=filename)
        trajname = traj.v_name
        traj.v_storage_service.complevel = 7

        first_row = {"ha": compat.tobytes("hi"), "haha": np.zeros((3, 3))}

        traj.f_store(only_init=True)

        res1 = traj.f_add_result("My.Tree.Will.Be.Deleted", 42)
        res2 = traj.f_add_result("Mine.Too.HomeBoy", 42, comment="Don`t cry for me!")

        res = traj.f_add_result(SharedResult, "myres")

        res["myres"] = SharedTable()

        res["myres"].create_shared_data(first_row=first_row)

        with StorageContextManager(traj):
            tab = traj.myres
            for irun in range(10000):
                row = traj.myres.row
                for key in first_row:
                    row[key] = first_row[key]
                row.append()
        traj.f_store()
        del traj
        traj = load_trajectory(name=trajname, filename=filename, load_all=2)
        with StorageContextManager(traj) as cm:
            tb = traj.myres.get_data_node()
            ptcompat.remove_rows(tb, 1000, 10000)

            cm.f_flush_store()
            self.assertTrue(traj.myres.nrows == 1001)

        traj.f_delete_item(traj.My, recursive=True)
        traj.f_delete_item(traj.Mine, recursive=True)

        size = os.path.getsize(filename)
        get_root_logger().info("Filesize is %s" % str(size))
        name_wo_ext, ext = os.path.splitext(filename)
        backup_file_name = name_wo_ext + "_backup" + ext
        code = compact_hdf5_file(filename, keep_backup=True)
        if code != 0:
            raise RuntimeError("ptrepack fail")
        backup_size = os.path.getsize(backup_file_name)
        self.assertTrue(backup_size == size)
        new_size = os.path.getsize(filename)
        get_root_logger().info("New filesize is %s" % str(new_size))
        self.assertTrue(new_size < size, "%s > %s" % (str(new_size), str(size)))
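
For context on the compaction step: deleting rows or whole nodes does not shrink an HDF5 file until it is rewritten, and the RuntimeError above suggests compact_hdf5_file shells out to PyTables' ptrepack for exactly that rewrite. The sketch below performs the equivalent step manually; it bypasses the pypet helper, assumes ptrepack is on the PATH, and uses illustrative file names.

import os
import subprocess

filename = 'hdf5compacting.hdf5'                 # illustrative path
backup_file_name = 'hdf5compacting_backup.hdf5'  # illustrative path

# Keep the bloated original as a backup and rewrite it into a fresh file;
# ptrepack copies only the live data, so freed space is actually reclaimed.
os.rename(filename, backup_file_name)
code = subprocess.call(['ptrepack', '--complevel', '7', '--complib', 'zlib',
                        backup_file_name, filename])
if code != 0:
    raise RuntimeError('ptrepack fail')

print(os.path.getsize(backup_file_name), '->', os.path.getsize(filename))
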
Example #14
    def test_two_runs(self):
        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.hdf5.purge_duplicate_comments = False
        ###Explore
        self.explore(self.traj)

        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0,
                        'Size is %sMB > 6MB' % str(size_in_mb))

        mp_traj = self.traj

        old_multiproc = self.multiproc
        self.multiproc = False

        ### Make a new single core run
        self.setUp()

        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.hdf5.purge_duplicate_comments = False
        ###Explore
        self.explore(self.traj)

        self.make_run()

        # newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0,
                        'Size is %sMB > 6MB' % str(size_in_mb))

        self.compare_trajectories(mp_traj, self.traj)
        self.multiproc = old_multiproc
    def test_a_large_run(self):
        get_root_logger().info('Testing large run')
        self.traj.f_add_parameter('TEST', 'test_run')
        ###Explore
        self.explore_large(self.traj)
        self.make_run_large_data()

        self.assertTrue(self.traj.f_is_completed())

        # Check if printing and repr work
        get_root_logger().info(str(self.env))
        get_root_logger().info(repr(self.env))

        newtraj = Trajectory()
        newtraj.f_load(name=self.traj.v_name, as_new=False, load_data=2, filename=self.filename)

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj,newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 30.0, 'Size is %sMB > 30MB' % str(size_in_mb))
Example #16
    def test_file_size_many_params(self):
        filename = make_temp_dir('filesize.hdf5')
        traj = Trajectory(filename=filename, overwrite_file=True, add_time=False)
        npars = 700
        traj.f_store()
        for irun in range(npars):
            par = traj.f_add_parameter('test.test%d' % irun, 42+irun, comment='duh!')
            traj.f_store_item(par)


        size =  os.path.getsize(filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 10.0, 'Size is %sMB > 10MB' % str(size_in_mb))
Example #17
    def test_a_large_run(self):
        get_root_logger().info('Testing large run')
        self.traj.f_add_parameter('TEST', 'test_run')
        ###Explore
        self.explore_large(self.traj)
        self.make_run_large_data()

        self.assertTrue(self.traj.f_is_completed())

        # Check if printing and repr work
        get_root_logger().info(str(self.env))
        get_root_logger().info(repr(self.env))

        newtraj = Trajectory()
        newtraj.f_load(name=self.traj.v_name, as_new=False, load_data=2, filename=self.filename)

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj,newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 30.0, 'Size is %sMB > 30MB' % str(size_in_mb))
Example #18
    def test_file_size_many_params(self):
        filename = make_temp_dir('filesize.hdf5')
        traj = Trajectory(filename=filename, overwrite_file=True, add_time=False)
        npars = 700
        traj.f_store()
        for irun in range(npars):
            par = traj.f_add_parameter('test.test%d' % irun, 42+irun, comment='duh!')
            traj.f_store_item(par)


        size =  os.path.getsize(filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 10.0, 'Size is %sMB > 10MB' % str(size_in_mb))
Example #19
def multiply_args(traj, arg1=0, arg2=0, arg3=0):
    rootlogger = get_root_logger()
    z=traj.x*traj.y + arg1 + arg2 + arg3
    rootlogger.info('z=x*y: '+str(z)+'='+str(traj.x)+'*'+str(traj.y) +
                    '+'+str(arg1)+'+'+str(arg2)+'+'+str(arg3))
    traj.f_add_result('z',z)
    return z
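
For context, a run function with extra arguments like this one is handed to the environment together with those arguments. The snippet below is an illustrative wiring, not part of the examples: the file name and parameter values are made up, and it assumes the same f_-prefixed Environment API used throughout (f_run forwards *args/**kwargs to the run function).

from pypet import Environment, cartesian_product

env = Environment(trajectory='MultiplyArgs',
                  filename='multiply_args.hdf5',  # illustrative path
                  log_stdout=False)
traj = env.v_trajectory
traj.f_add_parameter('x', 1)
traj.f_add_parameter('y', 1)
traj.f_explore(cartesian_product({'x': [1, 2], 'y': [3, 4]}))

# Everything after the run function is forwarded to it, so each single run
# calls multiply_args(traj, 10, arg3=5) and logs z via the root logger.
results = env.f_run(multiply_args, 10, arg3=5)
env.f_disable_logging()
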
Example #20
def multiply_with_storing(traj):
    rootlogger = get_root_logger()
    z = traj.x * traj.y
    rootlogger.info('z=x*y: ' + str(z) + '=' + str(traj.x) + '*' + str(traj.y))
    traj.f_add_result('z', z)
    traj.f_store()
    return z
Example #21
def multiply_args(traj, arg1=0, arg2=0, arg3=0):
    rootlogger = get_root_logger()
    z=traj.x*traj.y + arg1 + arg2 + arg3
    rootlogger.info('z=x*y: '+str(z)+'='+str(traj.x)+'*'+str(traj.y) +
                    '+'+str(arg1)+'+'+str(arg2)+'+'+str(arg3))
    traj.f_add_result('z',z)
    return z
Example #22
def multiply_with_storing(traj):
    rootlogger = get_root_logger()
    z=traj.x*traj.y
    rootlogger.info('z=x*y: '+str(z)+'='+str(traj.x)+'*'+str(traj.y))
    traj.f_add_result('z',z)
    traj.f_store()
    return z
Example #23
def write_into_shared_storage(traj):
    traj.f_add_result("ggg", 42)
    traj.f_add_derived_parameter("huuu", 46)

    root = get_root_logger()
    daarrays = traj.res.daarrays
    idx = traj.v_idx
    ncores = traj[traj.v_environment_name].f_get_default("ncores", 1)
    root.info("1. This")
    a = daarrays.a
    a[idx] = idx
    root.info("2. is")
    ca = daarrays.ca
    ca[idx] = idx
    root.info("3. a")
    ea = daarrays.ea
    ea.append(np.ones((1, 10)) * idx)
    root.info("4. sequential")
    vla = daarrays.vla
    vla.append(np.ones(idx + 2) * idx)
    root.info("5. Block")
    the_range = list(range(max(0, idx - 2 * ncores), max(0, idx)))
    for irun in the_range:
        x, y = a[irun], irun
        if x != y and x != 0:
            raise RuntimeError("ERROR in write_into_shared_storage %s != %s" % (str(x), str(y)))
        x, y = ca[irun], irun
        if x != y and x != 0:
            raise RuntimeError("ERROR in write_into_shared_storage %s != %s" % (str(x), str(y)))
        try:
            x, y = ea[irun, 9], ea[irun, 8]
            if x != y and x != 0:
                raise RuntimeError("ERROR in write_into_shared_storage %s != %s" % (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
        try:
            x, y = vla[irun][0], vla[irun][1]
            if x != y and x != 0:
                raise RuntimeError("ERROR in write_into_shared_storage %s != %s" % (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
    root.info("6. !!!!!!!!!")

    tabs = traj.tabs

    with StorageContextManager(traj) as cm:
        t1 = tabs.t1
        row = t1.row
        row["run_name"] = compat.tobytes(traj.v_crun)
        row["idx"] = idx
        row.append()
        t1.flush()

    t2 = tabs.t2
    row = t2[idx]
    if row["run_name"] != compat.tobytes(traj.v_crun):
        raise RuntimeError("Names in run table do not match, Run: %s != %s" % (row["run_name"], traj.v_crun))

    df = traj.df
    df.append(pd.DataFrame({"idx": [traj.v_idx], "run_name": traj.v_crun}))
Example #24
    def test_two_runs(self):
        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.hdf5.purge_duplicate_comments = False
        ###Explore
        self.explore(self.traj)

        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0, 'Size is %sMB > 6MB' % str(size_in_mb))

        mp_traj = self.traj

        old_multiproc = self.multiproc
        self.multiproc = False

        ### Make a new single core run
        self.setUp()

        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.hdf5.purge_duplicate_comments = False
        ###Explore
        self.explore(self.traj)

        self.make_run()

        # newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0, 'Size is %sMB > 6MB' % str(size_in_mb))

        self.compare_trajectories(mp_traj, self.traj)
        self.multiproc = old_multiproc
Example #25
    def test_expand(self):
        ###Explore
        self.explore(self.traj)

        results = self.env.f_run(multiply)
        self.are_results_in_order(results)

        get_root_logger().info(results)
        traj = self.traj
        self.assertEqual(len(traj),
                         len(list(compat.listvalues(self.explore_dict)[0])))

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)
        self.check_if_z_is_correct(traj)

        traj_name = self.env.v_trajectory.v_name
        del self.env
        self.env = Environment(trajectory=self.traj,
                               log_stdout=False,
                               log_config=get_log_config())

        self.traj = self.env.v_trajectory

        self.traj.f_load(name=traj_name)

        self.expand(self.traj)

        results = self.env.f_run(multiply)
        self.are_results_in_order(results)

        traj = self.traj
        self.assertTrue(
            len(traj) == len(compat.listvalues(self.expand_dict)[0]) +
            len(compat.listvalues(self.explore_dict)[0]))

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)
        self.check_if_z_is_correct(traj)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)
Example #26
def postproc(traj, results, idx):
    get_root_logger().info(idx)

    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()

    if isinstance(traj.v_storage_service, (QueueStorageServiceSender, PipeStorageServiceSender)):
        try:
            traj.f_load()
            raise RuntimeError('Should not load')
        except NotImplementedError:
            pass

    if len(results) <= 4 and len(traj) == 4:
        return {'x':[1,2], 'y':[1,2]}
    if len(results) <= 6 and len(traj) == 6:
        traj.f_expand({'x':[2,3], 'y':[0,1]})
    def test_just_one_run(self):
        self.make_run()
        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        self.assertTrue(len(newtraj) == 1)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 2.0, 'Size is %sMB > 2MB' % str(size_in_mb))

        with self.assertRaises(TypeError):
            self.explore(self.traj)
    def test_run(self):
        self.traj.f_add_parameter('TEST', 'test_run')
        ###Explore
        self.explore(self.traj)

        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0, 'Size is %sMB > 6MB' % str(size_in_mb))
Example #29
def multiply_with_graceful_exit(traj):
    z=traj.x*traj.y
    rootlogger = get_root_logger()
    rootlogger.info('z=x*y: '+str(z)+'='+str(traj.x)+'*'+str(traj.y))
    traj.f_add_result('z',z)
    if traj.v_idx > 3:
        if not sigint_handling.hit:
            sigint_handling._handle_sigint(None, None)
    return z
Example #30
def postproc(traj, results, idx):
    get_root_logger().info(idx)

    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()

    if isinstance(traj.v_storage_service,
                  (QueueStorageServiceSender, PipeStorageServiceSender)):
        try:
            traj.f_load()
            raise RuntimeError('Should not load')
        except NotImplementedError:
            pass

    if len(results) <= 4 and len(traj) == 4:
        return {'x': [1, 2], 'y': [1, 2]}
    if len(results) <= 6 and len(traj) == 6:
        traj.f_expand({'x': [2, 3], 'y': [0, 1]})
Example #31
def multiply_with_graceful_exit(traj):
    z = traj.x * traj.y
    rootlogger = get_root_logger()
    rootlogger.info('z=x*y: ' + str(z) + '=' + str(traj.x) + '*' + str(traj.y))
    traj.f_add_result('z', z)
    if traj.v_idx > 3:
        if not sigint_handling.hit:
            sigint_handling._handle_sigint(None, None)
    return z
    def test_expand(self):

        ###Explore
        self.traj.f_add_parameter('TEST', 'test_expand')
        self.explore(self.traj)

        self.make_run()

        self.expand()

        get_root_logger().info('\n $$$$$$$$$$$$$$$$$ Second Run $$$$$$$$$$$$$$$$$$$$$$$$')
        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)
Example #33
    def test_expand(self):

        ###Explore
        self.traj.f_add_parameter('TEST', 'test_expand')
        self.explore(self.traj)

        self.make_run()

        self.expand()

        get_root_logger().info('\n $$$$$$$$$$$$$$$$$ Second Run $$$$$$$$$$$$$$$$$$$$$$$$')
        self.make_run()

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)
Example #34
    def test_expand(self):
        ###Explore
        self.explore(self.traj)

        results = self.env.f_run(multiply)
        self.are_results_in_order(results)

        get_root_logger().info(results)
        traj = self.traj
        self.assertEqual(len(traj), len(list(compat.listvalues(self.explore_dict)[0])))

        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)
        self.check_if_z_is_correct(traj)

        traj_name = self.env.v_trajectory.v_name
        del self.env
        self.env = Environment(trajectory=self.traj,
                          log_stdout=False,
                          log_config=get_log_config())

        self.traj = self.env.v_trajectory

        self.traj.f_load(name=traj_name)

        self.expand(self.traj)

        results = self.env.f_run(multiply)
        self.are_results_in_order(results)

        traj = self.traj
        self.assertTrue(len(traj) == len(compat.listvalues(self.expand_dict)[0])+ len(compat.listvalues(self.explore_dict)[0]))


        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)
        self.check_if_z_is_correct(traj)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj,newtraj)
    def test_just_one_run(self):
        self.make_run()
        self.assertTrue(self.traj.f_is_completed())

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        self.assertTrue(len(newtraj) == 1)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 2.0, 'Size is %sMB > 2MB' % str(size_in_mb))

        with self.assertRaises(TypeError):
            self.explore(self.traj)
    def test_run(self):
        self.traj.f_add_parameter('TEST', 'test_run')
        ###Explore
        self.explore(self.traj)

        self.make_run()

        self.assertTrue(self.traj.f_is_completed())

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.compare_trajectories(self.traj, newtraj)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 6.0, 'Size is %sMB > 6MB' % str(size_in_mb))
def write_into_shared_storage(traj):
    traj.f_add_result('ggg', 42)
    traj.f_add_derived_parameter('huuu', 46)

    root = get_root_logger()
    daarrays = traj.res.daarrays
    idx = traj.v_idx
    ncores = traj[traj.v_environment_name].f_get_default('ncores', 1)
    root.info('1. This')
    a = daarrays.a
    a[idx] = idx
    root.info('2. is')
    ca = daarrays.ca
    ca[idx] = idx
    root.info('3. a')
    ea = daarrays.ea
    ea.append(np.ones((1,10))*idx)
    root.info('4. sequential')
    vla = daarrays.vla
    vla.append(np.ones(idx+2)*idx)
    root.info('5. Block')
    if idx > ncores+2:
        x, y = a[idx-ncores], idx-ncores
        if x != y:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        x, y = ca[idx-ncores], idx-ncores
        if x != y:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        x, y = ea[idx-ncores, 9], ea[idx-ncores, 8]
        if x != y:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        x, y = vla[idx-ncores][0], vla[idx-ncores][1]
        if x != y:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
    root.info('6. !!!!!!!!!')

    tabs = traj.tabs

    with StorageContextManager(traj) as cm:
        t1 = tabs.t1
        row = t1.row
        row['run_name'] = compat.tobytes(traj.v_crun)
        row['idx'] = idx
        row.append()
        t1.flush()

    t2 = tabs.t2
    row = t2[idx]
    if row['run_name'] != compat.tobytes(traj.v_crun):
        raise RuntimeError('Names in run table do not match, Run: %s != %s' % (row['run_name'],
                                                                                   traj.v_crun) )

    df = traj.df
    df.append(pd.DataFrame({'idx':[traj.v_idx], 'run_name':traj.v_crun}))
Example #38
    def test_progressbar_logging(self):
        logger = get_root_logger()

        total = 33

        for irun in range(total):
            time.sleep(0.005)
            progressbar(irun, total, logger=logger)

        for irun in range(total):
            time.sleep(0.005)
            progressbar(irun, total, logger='GetLogger')
Example #39
    def test_progressbar_logging(self):
        logger = get_root_logger()

        total = 33

        for irun in range(total):
            time.sleep(0.005)
            progressbar(irun, total, logger=logger)

        for irun in range(total):
            time.sleep(0.005)
            progressbar(irun, total, logger='GetLogger')
Example #40
    def test_giant_matrices(self):

        self.length = 20
        self.traj.f_explore({"trial": range(self.length)})

        self.add_matrix_params(self.traj)

        self.traj.f_add_parameter("TEST", "test_run")

        self.env.f_run(copy_one_entry_from_giant_matrices)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name, as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.check_matrices(self.traj)
        self.check_matrices(newtraj)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.0
        get_root_logger().info("Size is %sMB" % str(size_in_mb))
        self.assertTrue(size_in_mb < 400.0, "Size is %sMB > 400MB" % str(size_in_mb))
    def test_loading_run(self):

        self.traj.f_add_parameter('y', 12)
        self.traj.f_explore({'y':[12,3,3,4]})

        self.traj.f_add_parameter('TEST', 'test_run')
        self.traj.f_add_derived_parameter('x', 42)
        self.traj.f_store()
        self.traj.dpar.f_remove_child('x')

        self.env.f_run(load_from_shared_storage)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name, as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        size=os.path.getsize(self.filename)
        size_in_mb = size/1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 2.0, 'Size is %sMB > 2MB' % str(size_in_mb))

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name, as_new=False)
        self.compare_trajectories(self.traj, newtraj)
Example #42
    def test_giant_matrices(self):

        self.length = 20
        self.traj.f_explore({'trial': range(self.length)})

        self.add_matrix_params(self.traj)

        self.traj.f_add_parameter('TEST', 'test_run')

        self.env.f_run(copy_one_entry_from_giant_matrices)

        newtraj = self.load_trajectory(trajectory_name=self.traj.v_name,
                                       as_new=False)
        self.traj.f_load_skeleton()
        self.traj.f_load_items(self.traj.f_to_dict().keys(), only_empties=True)

        self.check_matrices(self.traj)
        self.check_matrices(newtraj)

        size = os.path.getsize(self.filename)
        size_in_mb = size / 1000000.
        get_root_logger().info('Size is %sMB' % str(size_in_mb))
        self.assertTrue(size_in_mb < 400.0,
                        'Size is %sMB > 400MB' % str(size_in_mb))
Example #43
def run_net(traj):

    clear(True, True)
    get_root_logger().info(traj.defaultclock)
    defaultclock.dt = traj.defaultclock

    C = traj.C
    gL = traj.gL
    EL = traj.EL
    VT = traj.VT
    DeltaT = traj.DeltaT
    tauw = traj.tauw
    a = traj.a
    b = traj.b
    I = traj.I
    Vcut = traj.Vcut  # practical threshold condition
    N = traj.N

    eqs = traj.eqs

    neuron = NeuronGroup(N, model=eqs, threshold=Vcut, reset=traj.reset)
    neuron.vm = EL
    neuron.w = a * (neuron.vm - EL)
    neuron.Vr = linspace(-48.3 * mV, -47.7 * mV, N)  # bifurcation parameter

    #run(25*msecond,report='text') # we discard the first spikes

    MSpike = SpikeMonitor(neuron,
                          delay=1 * ms)  # record Vr and w at spike times
    MPopSpike = PopulationSpikeCounter(neuron, delay=1 * ms)
    MPopRate = PopulationRateMonitor(neuron, bin=5 * ms)
    MStateV = StateMonitor(neuron, 'vm', record=[1, 2, 3])
    MStatewMean = StateMonitor(neuron, 'w', record=False)

    MRecentStateV = RecentStateMonitor(neuron,
                                       'vm',
                                       record=[1, 2, 3],
                                       duration=10 * ms)
    MRecentStatewMean = RecentStateMonitor(neuron,
                                           'w',
                                           duration=10 * ms,
                                           record=False)

    MCounts = SpikeCounter(neuron)

    MStateSpike = StateSpikeMonitor(neuron, ('w', 'vm'))

    MMultiState = MultiStateMonitor(neuron, ['w', 'vm'], record=[6, 7, 8, 9])

    ISIHist = ISIHistogramMonitor(neuron, [0, 0.0001, 0.0002], delay=1 * ms)

    VanRossum = VanRossumMetric(neuron, tau=5 * msecond)

    run(25 * msecond, report='text')

    traj.v_standard_result = BrianMonitorResult

    traj.f_add_result('SpikeMonitor', MSpike)
    traj.f_add_result('SpikeMonitorAr',
                      MSpike,
                      storage_mode=BrianMonitorResult.ARRAY_MODE)
    traj.f_add_result('PopulationSpikeCounter', MPopSpike)
    traj.f_add_result('PopulationRateMonitor', MPopRate)
    traj.f_add_result('StateMonitorV', MStateV)
    traj.f_add_result('StateMonitorwMean', MStatewMean)
    traj.f_add_result('Counts', MCounts)

    traj.f_add_result('StateSpikevmw', MStateSpike)
    traj.f_add_result('StateSpikevmwAr',
                      MStateSpike,
                      storage_mode=BrianMonitorResult.ARRAY_MODE)
    traj.f_add_result('MultiState', MMultiState)
    traj.f_add_result('ISIHistogrammMonitor', ISIHist)
    traj.f_add_result('RecentStateMonitorV', MRecentStateV)
    traj.f_add_result('RecentStateMonitorwMean', MRecentStatewMean)
    traj.f_add_result('VanRossumMetric', VanRossum)
Example #44
    def test_wildcard_search(self):

        traj = Trajectory(name='Testwildcard', filename=make_temp_dir('wilcard.hdf5'))

        traj.f_add_parameter('expl', 2)
        traj.f_explore({'expl':[1,2,3,4]})

        traj.f_add_result('wc2test.$.hhh', 333)
        traj.f_add_leaf('results.wctest.run_00000000.jjj', 42)
        traj.f_add_result('results.wctest.run_00000001.jjj', 43)
        traj.f_add_result('results.wctest.%s.jjj' % traj.f_wildcard('$', -1), 43)

        traj.v_as_run = 1

        self.assertTrue(traj.results.wctest['$'].jjj==43)
        self.assertTrue(traj.results.wc2test.crun.hhh==333)

        traj.f_store()

        get_root_logger().info('Removing child1')

        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('Doing auto-load')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest['$'].jjj==43)
        self.assertTrue(traj.results.wc2test.crun.hhh==333)

        get_root_logger().info('Removing child2')

        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('auto-loading')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest[-1].jjj==43)
        self.assertTrue(traj.results.wc2test[-1].hhh==333)

        get_root_logger().info('Removing child3')
        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('auto-loading')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest[1].jjj==43)
        self.assertTrue(traj.results.wc2test[-1].hhh==333)

        get_root_logger().info('Done with wildcard test')
Example #45
    def test_import_star(self):
        for class_name in pypet.__all__:
            logstr = 'Evaluating %s: %s' % (class_name, repr(eval(class_name)))
            get_root_logger().info(logstr)
Example #46
def simple_calculations(traj, arg1, simple_kwarg):

    if traj.v_idx == 0:
        # to shuffle runs
        time.sleep(0.1)

    rootlogger = get_root_logger()

    if not 'runs' in traj.res:
        traj.res.f_add_result_group('runs')

    rootlogger.info('>>>>>Starting Simple Calculations')
    my_dict = {}

    my_dict2 = {}
    param_dict = traj.parameters.f_to_dict(fast_access=True, short_names=False)
    for key in sorted(param_dict.keys())[0:5]:
        val = param_dict[key]
        if 'trial' in key:
            continue
        newkey = key.replace('.', '_')
        my_dict[newkey] = str(val)
        my_dict2[newkey] = [str(val) + ' juhu!']

    my_dict['__FLOAT'] = 44.0
    my_dict['__INT'] = 66
    my_dict['__NPINT'] = np.int_(55)
    my_dict['__INTaRRAy'] = np.array([1, 2, 3])
    my_dict['__FLOATaRRAy'] = np.array([1.0, 2.0, 41.0])
    my_dict['__FLOATaRRAy_nested'] = np.array(
        [np.array([1.0, 2.0, 41.0]),
         np.array([1.0, 2.0, 41.0])])
    my_dict['__STRaRRAy'] = np.array(['sds', 'aea', 'sf'])
    my_dict['__LONG'] = 4266
    my_dict['__UNICODE'] = u'sdfdsf'
    my_dict['__BYTES'] = b'zweiundvierzig'
    my_dict['__NUMPY_UNICODE'] = np.array([u'$%&ddss'])
    my_dict['__NUMPY_BYTES'] = np.array([b'zweiundvierzig'])

    keys = sorted(to_dict_wo_config(traj).keys())
    for idx, key in enumerate(keys[0:5]):
        keys[idx] = key.replace('.', '_')

    listy = traj.f_add_result_group('List', comment='Im a result group')
    traj.f_add_result_group('Iwiiremainempty.yo', comment='Empty group!')
    traj.Iwiiremainempty.f_store_child('yo')

    traj.Iwiiremainempty.f_add_link('kkk', listy)
    listy.f_add_link('hhh', traj.Iwiiremainempty)

    if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.v_full_name:
        raise RuntimeError()

    if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.hhh.kkk.v_full_name:
        raise RuntimeError()

    traj.f_add_result('runs.' + traj.v_crun + '.ggg', 5555, comment='ladida')
    traj.res.runs.f_add_result(traj.v_crun + '.ggjg', 5555, comment='didili')
    traj.res.runs.f_add_result('hhg', 5555, comment='jjjj')

    traj.res.f_add_result(name='lll', comment='duh', data=444)

    x = traj.res.f_add_result(name='nested', comment='duh')

    x['nested0.nested1.nested2.nested3'] = 44

    traj.res.f_add_result(name='test.$set.$', comment='duh', data=444)

    try:
        traj.f_add_config('teeeeest', 12)
        raise RuntimeError()
    except TypeError:
        pass

    # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggjg', shortcuts=False):
    #     raise RuntimeError()
    # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggg', shortcuts=False):
    #     raise RuntimeError()
    if not traj.f_contains('results.runs.' + traj.v_crun + '.hhg',
                           shortcuts=False):
        raise RuntimeError()

    traj.f_add_result('List.Of.Keys',
                      dict1=my_dict,
                      dict2=my_dict2,
                      comment='Test')
    traj.List.f_store_child('Of', recursive=True)
    traj.f_add_result('DictsNFrame', keys=keys, comment='A dict!')
    traj.f_add_result('ResMatrix', np.array([1.2, 2.3]), comment='ResMatrix')

    traj.f_add_result('empty.stuff', (), [], {},
                      np.array([]),
                      comment='empty stuff')
    #traj.f_add_derived_parameter('All.To.String', str(traj.f_to_dict(fast_access=True,short_names=False)))

    myframe = pd.DataFrame(data={
        'TC1': [1, 2, 3],
        'TC2': ['Waaa', '', ''],
        'TC3': [1.2, 42.2, 77]
    })

    myseries = myframe['TC1']

    traj.f_add_result('myseries', myseries, comment='dd')
    traj.f_store_item('myseries')

    traj.f_get('DictsNFrame').f_set(myframe)

    traj.f_add_result('IStore.SimpleThings',
                      1.0,
                      3,
                      np.float32(5.0),
                      'Iamstring', (1, 2, 3), [4, 5, 6],
                      zwei=2).v_comment = 'test'
    traj.f_add_derived_parameter('super.mega', 33, comment='It is huuuuge!')
    traj.super.f_set_annotations(AgainATestAnnotations='I am a string!111elf')

    traj.f_add_result(PickleResult,
                      'pickling.result.proto1',
                      my_dict2,
                      protocol=1,
                      comment='p1')
    traj.f_add_result(PickleResult,
                      'pickling.result.proto2',
                      my_dict2,
                      protocol=2,
                      comment='p2')
    traj.f_add_result(PickleResult,
                      'pickling.result.proto0',
                      my_dict2,
                      protocol=0,
                      comment='p0')

    traj.f_add_result(SparseResult, 'sparse.csc', traj.csc_mat,
                      42).v_comment = 'sdsa'
    traj.f_add_result(SparseResult, 'sparse.bsr', traj.bsr_mat,
                      52).v_comment = 'sdsa'
    traj.f_add_result(SparseResult, 'sparse.csr', traj.csr_mat,
                      62).v_comment = 'sdsa'
    traj.f_add_result(SparseResult, 'sparse.dia', traj.dia_mat,
                      72).v_comment = 'sdsa'

    traj.sparse.v_comment = 'I contain sparse data!'

    myobjtab = ObjectTable(data={
        'strings': ['a', 'abc', 'qwertt'],
        'ints': [1, 2, 3]
    })

    traj.f_add_result('object.table', myobjtab,
                      comment='k').v_annotations.f_set(test=42)
    traj.object.f_set_annotations(test2=42.42)

    traj.f_add_result('$.here', 77, comment='huhu')
    traj.f_add_result('tor.hot.$', dollah=77, comment='duh!')
    traj.f_add_result('tor.hot.rrr.$.j', 77, comment='duh!')
    traj.f_add_result('tor.hot.rrr.crun.jjj', 777, comment='duh**2!')

    #traj.f_add_result('PickleTerror', result_type=PickleResult, test=traj.SimpleThings)
    rootlogger.info('<<<<<<Finished Simple Calculations')

    # let's see if the traj can also always be returned
    if isinstance(traj.v_storage_service, LockWrapper):
        traj.v_storage_service.pickle_lock = False
    return 42, traj
Example #47
def simple_calculations(traj, arg1, simple_kwarg):

        if traj.v_idx == 0:
            # to shuffle runs
            time.sleep(0.1)

        rootlogger = get_root_logger()

        if not 'runs' in traj.res:
            traj.res.f_add_result_group('runs')

        rootlogger.info('>>>>>Starting Simple Calculations')
        my_dict = {}

        my_dict2={}
        param_dict=traj.parameters.f_to_dict(fast_access=True,short_names=False)
        for key in sorted(param_dict.keys())[0:5]:
            val = param_dict[key]
            if 'trial' in key:
                continue
            newkey = key.replace('.','_')
            my_dict[newkey] = str(val)
            my_dict2[newkey] = [str(val)+' juhu!']

        my_dict['__FLOAT'] = 44.0
        my_dict['__INT'] = 66
        my_dict['__NPINT'] = np.int_(55)
        my_dict['__INTaRRAy'] = np.array([1,2,3])
        my_dict['__FLOATaRRAy'] = np.array([1.0,2.0,41.0])
        my_dict['__FLOATaRRAy_nested'] = np.array([np.array([1.0,2.0,41.0]),np.array([1.0,2.0,41.0])])
        my_dict['__STRaRRAy'] = np.array(['sds','aea','sf'])
        my_dict['__LONG'] = compat.long_type(4266)
        my_dict['__UNICODE'] = u'sdfdsf'
        my_dict['__BYTES'] = b'zweiundvierzig'
        my_dict['__NUMPY_UNICODE'] = np.array([u'$%&ddss'])
        my_dict['__NUMPY_BYTES'] = np.array([b'zweiundvierzig'])

        keys = sorted(to_dict_wo_config(traj).keys())
        for idx,key in enumerate(keys[0:5]):
            keys[idx] = key.replace('.', '_')

        listy=traj.f_add_result_group('List', comment='Im a result group')
        traj.f_add_result_group('Iwiiremainempty.yo', comment='Empty group!')
        traj.Iwiiremainempty.f_store_child('yo')

        traj.Iwiiremainempty.f_add_link('kkk',listy )
        listy.f_add_link('hhh', traj.Iwiiremainempty)

        if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.v_full_name:
            raise RuntimeError()

        if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.hhh.kkk.v_full_name:
            raise RuntimeError()

        traj.f_add_result('runs.' + traj.v_crun + '.ggg', 5555, comment='ladida')
        traj.res.runs.f_add_result(traj.v_crun + '.ggjg', 5555, comment='didili')
        traj.res.runs.f_add_result('hhg', 5555, comment='jjjj')

        traj.res.f_add_result(name='lll', comment='duh', data=444)

        x = traj.res.f_add_result(name='nested', comment='duh')

        x['nested0.nested1.nested2.nested3'] =  44

        traj.res.f_add_result(name='test.$set.$', comment='duh', data=444)

        try:
            traj.f_add_config('teeeeest', 12)
            raise RuntimeError()
        except TypeError:
            pass

        # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggjg', shortcuts=False):
        #     raise RuntimeError()
        # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggg', shortcuts=False):
        #     raise RuntimeError()
        if not traj.f_contains('results.runs.' + traj.v_crun + '.hhg', shortcuts=False):
            raise RuntimeError()

        traj.f_add_result('List.Of.Keys', dict1=my_dict, dict2=my_dict2, comment='Test')
        traj.List.f_store_child('Of', recursive=True)
        traj.f_add_result('DictsNFrame', keys=keys, comment='A dict!')
        traj.f_add_result('ResMatrix',np.array([1.2,2.3]), comment='ResMatrix')

        traj.f_add_result('empty.stuff', (), [], {}, np.array([]), comment='empty stuff')
        #traj.f_add_derived_parameter('All.To.String', str(traj.f_to_dict(fast_access=True,short_names=False)))

        myframe = pd.DataFrame(data ={'TC1':[1,2,3],'TC2':['Waaa','',''],'TC3':[1.2,42.2,77]})

        myseries = myframe['TC1']

        mypanel = pd.Panel({'Item1' : pd.DataFrame(np.ones((4, 3))),'Item2' : pd.DataFrame(np.ones((4, 2)))})

        # p4d = pd.Panel4D(np.random.randn(2, 2, 5, 4),
        #     labels=['Label1','Label2'],
        #    items=['Item1', 'Item2'],
        #    major_axis=pd.date_range('1/1/2000', periods=5),
        #   minor_axis=['A', 'B', 'C', 'D'])


        traj.f_add_result('myseries', myseries, comment='dd')
        traj.f_store_item('myseries')
        traj.f_add_result('mypanel', mypanel, comment='dd')
        #traj.f_add_result('mypanel4d', p4d, comment='dd')

        traj.f_get('DictsNFrame').f_set(myframe)

        traj.f_add_result('IStore.SimpleThings',1.0,3,np.float32(5.0),
                          'Iamstring',
                          (1,2,3), [4,5,6],
                          zwei=2).v_comment='test'
        traj.f_add_derived_parameter('super.mega',33, comment='It is huuuuge!')
        traj.super.f_set_annotations(AgainATestAnnotations='I am a string!111elf')

        traj.f_add_result(PickleResult,'pickling.result.proto1', my_dict2, protocol=1, comment='p1')
        traj.f_add_result(PickleResult,'pickling.result.proto2', my_dict2, protocol=2, comment='p2')
        traj.f_add_result(PickleResult,'pickling.result.proto0', my_dict2, protocol=0, comment='p0')

        traj.f_add_result(SparseResult, 'sparse.csc', traj.csc_mat, 42).v_comment='sdsa'
        traj.f_add_result(SparseResult, 'sparse.bsr', traj.bsr_mat, 52).v_comment='sdsa'
        traj.f_add_result(SparseResult, 'sparse.csr', traj.csr_mat, 62).v_comment='sdsa'
        traj.f_add_result(SparseResult, 'sparse.dia', traj.dia_mat, 72).v_comment='sdsa'

        traj.sparse.v_comment = 'I contain sparse data!'

        myobjtab = ObjectTable(data={'strings':['a','abc','qwertt'], 'ints':[1,2,3]})

        traj.f_add_result('object.table', myobjtab, comment='k').v_annotations.f_set(test=42)
        traj.object.f_set_annotations(test2=42.42)

        traj.f_add_result('$.here', 77, comment='huhu')
        traj.f_add_result('tor.hot.$', dollah=77, comment='duh!')
        traj.f_add_result('tor.hot.rrr.$.j', 77, comment='duh!')
        traj.f_add_result('tor.hot.rrr.crun.jjj', 777, comment='duh**2!')

        #traj.f_add_result('PickleTerror', result_type=PickleResult, test=traj.SimpleThings)
        rootlogger.info('<<<<<<Finished Simple Calculations')

        # let's see if the traj can also always be returned
        if isinstance(traj.v_storage_service, LockWrapper):
            traj.v_storage_service.pickle_lock = False
        return 42, traj
Example #48
    def test_import_star(self):
        for class_name in pypet.brian2.__all__:
            logstr = 'Evaluating %s: %s' % (class_name, repr(
                eval(class_name)))
            get_root_logger().info(logstr)
def run_net(traj):

    clear(True, True)
    get_root_logger().info(traj.defaultclock)
    defaultclock.dt=traj.defaultclock

    C=traj.C
    gL=traj.gL
    EL=traj.EL
    VT=traj.VT
    DeltaT=traj.DeltaT
    tauw=traj.tauw
    a=traj.a
    b=traj.b
    I=traj.I
    Vcut=traj.Vcut# practical threshold condition
    N=traj.N

    eqs=traj.eqs

    neuron=NeuronGroup(N,model=eqs,threshold=Vcut,reset=traj.reset)
    neuron.vm=EL
    neuron.w=a*(neuron.vm-EL)
    neuron.Vr=linspace(-48.3*mV,-47.7*mV,N) # bifurcation parameter

    #run(25*msecond,report='text') # we discard the first spikes

    MSpike=SpikeMonitor(neuron, delay = 1*ms) # record Vr and w at spike times
    MPopSpike =PopulationSpikeCounter(neuron, delay = 1*ms)
    MPopRate = PopulationRateMonitor(neuron,bin=5*ms)
    MStateV = StateMonitor(neuron,'vm',record=[1,2,3])
    MStatewMean = StateMonitor(neuron,'w',record=False)

    MRecentStateV = RecentStateMonitor(neuron,'vm',record=[1,2,3],duration=10*ms)
    MRecentStatewMean = RecentStateMonitor(neuron,'w',duration=10*ms,record=False)

    MCounts = SpikeCounter(neuron)

    MStateSpike = StateSpikeMonitor(neuron,('w','vm'))

    MMultiState = MultiStateMonitor(neuron,['w','vm'],record=[6,7,8,9])

    ISIHist = ISIHistogramMonitor(neuron,[0,0.0001,0.0002], delay = 1*ms)

    VanRossum = VanRossumMetric(neuron, tau=5*msecond)

    run(25*msecond,report='text')

    traj.v_standard_result = BrianMonitorResult

    traj.f_add_result('SpikeMonitor', MSpike)
    traj.f_add_result('SpikeMonitorAr', MSpike, storage_mode = BrianMonitorResult.ARRAY_MODE)
    traj.f_add_result('PopulationSpikeCounter', MPopSpike)
    traj.f_add_result('PopulationRateMonitor',MPopRate)
    traj.f_add_result('StateMonitorV', MStateV)
    traj.f_add_result('StateMonitorwMean', MStatewMean)
    traj.f_add_result('Counts',MCounts)

    traj.f_add_result('StateSpikevmw', MStateSpike)
    traj.f_add_result('StateSpikevmwAr', MStateSpike,storage_mode = BrianMonitorResult.ARRAY_MODE)
    traj.f_add_result('MultiState',MMultiState)
    traj.f_add_result('ISIHistogrammMonitor',ISIHist)
    traj.f_add_result('RecentStateMonitorV', MRecentStateV)
    traj.f_add_result('RecentStateMonitorwMean', MRecentStatewMean)
    traj.f_add_result('VanRossumMetric', VanRossum)
def write_into_shared_storage(traj):
    traj.f_add_result('ggg', 42)
    traj.f_add_derived_parameter('huuu', 46)

    root = get_root_logger()
    daarrays = traj.res.daarrays
    idx = traj.v_idx
    ncores = traj[traj.v_environment_name].f_get_default('ncores', 1)
    root.info('1. This')
    a = daarrays.a
    a[idx] = idx
    root.info('2. is')
    ca = daarrays.ca
    ca[idx] = idx
    root.info('3. a')
    ea = daarrays.ea
    ea.append(np.ones((1,10))*idx)
    root.info('4. sequential')
    vla = daarrays.vla
    vla.append(np.ones(idx+2)*idx)
    root.info('5. Block')
    the_range = list(range(max(0, idx-2*ncores), max(0, idx)))
    for irun in the_range:
        x, y = a[irun], irun
        if x != y and x != 0:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        x, y = ca[irun], irun
        if x != y and x != 0:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        try:
            x, y = ea[irun, 9], ea[irun, 8]
            if x != y and x != 0:
                raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
        try:
            x, y = vla[irun][0], vla[irun][1]
            if x != y and x != 0:
                raise RuntimeError('ERROR in write_into_shared_storage %s != %s' % (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
    root.info('6. !!!!!!!!!')

    tabs = traj.tabs

    with StorageContextManager(traj) as cm:
        t1 = tabs.t1
        row = t1.row
        row['run_name'] = traj.v_crun.encode('utf-8')
        row['idx'] = idx
        row.append()
        t1.flush()

    t2 = tabs.t2
    row = t2[idx]
    if row['run_name'] != traj.v_crun.encode('utf-8'):
        raise RuntimeError('Names in run table do not match, Run: %s != %s' % (row['run_name'],
                                                                                   traj.v_crun) )

    df = traj.df
    df.append(pd.DataFrame({'idx':[traj.v_idx], 'run_name':traj.v_crun}))
Example #51
    def test_wildcard_search(self):

        traj = Trajectory(name='Testwildcard', filename=make_temp_dir('wilcard.hdf5'),
                          add_time=True)

        traj.f_add_parameter('expl', 2)
        traj.f_explore({'expl':[1,2,3,4]})

        traj.f_add_result('wc2test.$.hhh', 333)
        traj.f_add_leaf('results.wctest.run_00000000.jjj', 42)
        traj.f_add_result('results.wctest.run_00000001.jjj', 43)
        traj.f_add_result('results.wctest.%s.jjj' % traj.f_wildcard('$', -1), 43)

        traj.v_crun = 1

        self.assertTrue(traj.results.wctest['$'].jjj==43)
        self.assertTrue(traj.results.wc2test.crun.hhh==333)

        traj.f_store()

        get_root_logger().info('Removing child1')

        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('Doing auto-load')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest['$'].jjj==43)
        self.assertTrue(traj.results.wc2test.crun.hhh==333)

        get_root_logger().info('Removing child2')

        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('auto-loading')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest[-1].jjj==43)
        self.assertTrue(traj.results.wc2test[-1].hhh==333)

        get_root_logger().info('Removing child3')
        traj.f_remove_child('results', recursive=True)

        get_root_logger().info('auto-loading')
        traj.v_auto_load = True

        self.assertTrue(traj.results.wctest[1].jjj==43)
        self.assertTrue(traj.results.wc2test[-1].hhh==333)

        get_root_logger().info('Done with wildcard test')
Example #52
def write_into_shared_storage(traj):
    traj.f_add_result('ggg', 42)
    traj.f_add_derived_parameter('huuu', 46)

    root = get_root_logger()
    daarrays = traj.res.daarrays
    idx = traj.v_idx
    ncores = traj[traj.v_environment_name].f_get_default('ncores', 1)
    root.info('1. This')
    a = daarrays.a
    a[idx] = idx
    root.info('2. is')
    ca = daarrays.ca
    ca[idx] = idx
    root.info('3. a')
    ea = daarrays.ea
    ea.append(np.ones((1, 10)) * idx)
    root.info('4. sequential')
    vla = daarrays.vla
    vla.append(np.ones(idx + 2) * idx)
    root.info('5. Block')
    the_range = list(range(max(0, idx - 2 * ncores), max(0, idx)))
    for irun in the_range:
        x, y = a[irun], irun
        if x != y and x != 0:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' %
                               (str(x), str(y)))
        x, y = ca[irun], irun
        if x != y and x != 0:
            raise RuntimeError('ERROR in write_into_shared_storage %s != %s' %
                               (str(x), str(y)))
        try:
            x, y = ea[irun, 9], ea[irun, 8]
            if x != y and x != 0:
                raise RuntimeError(
                    'ERROR in write_into_shared_storage %s != %s' %
                    (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
        try:
            x, y = vla[irun][0], vla[irun][1]
            if x != y and x != 0:
                raise RuntimeError(
                    'ERROR in write_into_shared_storage %s != %s' %
                    (str(x), str(y)))
        except IndexError:
            pass  # Array is not at this size yet
    root.info('6. !!!!!!!!!')

    tabs = traj.tabs

    with StorageContextManager(traj) as cm:
        t1 = tabs.t1
        row = t1.row
        row['run_name'] = compat.tobytes(traj.v_crun)
        row['idx'] = idx
        row.append()
        t1.flush()

    t2 = tabs.t2
    row = t2[idx]
    if row['run_name'] != compat.tobytes(traj.v_crun):
        raise RuntimeError('Names in run table do not match, Run: %s != %s' %
                           (row['run_name'], traj.v_crun))

    df = traj.df
    df.append(pd.DataFrame({'idx': [traj.v_idx], 'run_name': traj.v_crun}))