def test_skip_variable():
    """Regression test: skip over the first of two variables in a MAT file.

    Reads the file with mat_reader_factory/get_variables, requesting only the
    second variable so the reader must seek past the first one.  The original
    bug: the compressed-stream seek was used instead of the raw file-I/O seek
    when skipping a compressed chunk, which broke when the chunk was large —
    this file holds a 256x256 array of random (uncompressible) doubles.
    """
    filename = pjoin(test_data_path, 'test_skip_variable.mat')
    # Sanity check first: the file loads fine via plain loadmat.
    d = loadmat(filename, struct_as_record=True)
    yield assert_, 'first' in d
    yield assert_, 'second' in d
    # Build the reader factory and request only the second variable.
    # This is where the factory used to break, with an error raised in
    # MatMatrixGetter.to_next while skipping the first variable.
    factory = mat_reader_factory(filename, struct_as_record=True)
    d = factory.get_variables('second')
    yield assert_, 'second' in d
    factory.mat_stream.close()
def test_skip_variable():
    """Regression test: skip over the first of two variables in a MAT file.

    Reads the file with mat_reader_factory/get_variables, requesting only the
    second variable so the reader must seek past the first one.  The original
    bug: the compressed-stream seek was used instead of the raw file-I/O seek
    when skipping a compressed chunk, which broke when the chunk was large —
    this file holds a 256x256 array of random (uncompressible) doubles.
    """
    filename = pjoin(test_data_path, 'test_skip_variable.mat')
    # Prove that the file loads with plain loadmat first.
    d = loadmat(filename, struct_as_record=True)
    # BUG FIX: dict.has_key() is deprecated in Python 2 and removed in
    # Python 3 — use the `in` operator (this also matches the sibling
    # version of this test elsewhere in the file).
    yield assert_true, 'first' in d
    yield assert_true, 'second' in d
    # Make the factory and request only the second variable.
    # This is where the factory used to break, with an error raised in
    # MatMatrixGetter.to_next while skipping the first variable.
    factory = mat_reader_factory(filename, struct_as_record=True)
    d = factory.get_variables('second')
    yield assert_true, 'second' in d
    factory.mat_stream.close()
if form_data.has_key("memory_limt") else 2000, "timeout": float(form_data["timeout"].value), "cpcluster": CPCluster, "batch_file": batch_file } error = CheckParameters(my_batch) if error: exception = RuntimeError() exception.message = error raise exception # # This is somewhat dependent on internals, but we mrf = mat_reader_factory(batch_file, struct_as_record=True) batch_info = mrf.get_variables(['number_of_image_sets']) if batch_info.has_key('number_of_image_sets'): my_batch["num_sets"] = batch_info['number_of_image_sets'][0, 0] else: batch_info = loadmat(batch_file, struct_as_record=True) my_batch["num_sets"] = batch_info['handles']['Current'][ 0, 0]['NumberOfImageSets'][0, 0][0, 0] runs = PartitionBatches(my_batch) batch_id = RunBatch.CreateBatchRun(my_batch) results = RunBatch.RunAll(batch_id) text = [] text.append("<html>") text.append("<head><title>Batch # %d</title>" % (batch_id)) text.append("<style type='text/css'>") text.append("""
"data_dir": form_data["data_dir"].value, "write_data": (form_data["write_data"].value.upper() == "Y" and 1) or 0, "batch_size": int(form_data["batch_size"].value), "memory_limit": float(form_data["memory_limit"].value) if form_data.has_key("memory_limt") else 2000, "timeout": float(form_data["timeout"].value), "cpcluster": CPCluster, "batch_file": batch_file, } error = CheckParameters(my_batch) if error: exception = RuntimeError() exception.message = error raise exception # # This is somewhat dependent on internals, but we mrf = mat_reader_factory(batch_file, struct_as_record=True) batch_info = mrf.get_variables(["number_of_image_sets"]) if batch_info.has_key("number_of_image_sets"): my_batch["num_sets"] = batch_info["number_of_image_sets"][0, 0] else: batch_info = loadmat(batch_file, struct_as_record=True) my_batch["num_sets"] = batch_info["handles"]["Current"][0, 0]["NumberOfImageSets"][0, 0][0, 0] runs = PartitionBatches(my_batch) batch_id = RunBatch.CreateBatchRun(my_batch) results = RunBatch.RunAll(batch_id) text = [] text.append("<html>") text.append("<head><title>Batch # %d</title>" % (batch_id)) text.append("<style type='text/css'>") text.append( """