def setUp(self):
    # These records are from Record 1 of Data Set 164,
    # layout FORMAT(I10, 20A1, I10).
    self.fields = [
        Field(int, 10, 'units_code', 'units code'),
        Field(str, 20, 'units_description', 'units description'),
        Field(int, 10, 'temperature_mode', '1 - absolute, 2 - relative'),
    ]
    # NOTE(review): field widths reconstructed from FORMAT(I10,20A1,I10);
    # whitespace runs were collapsed in the source view — confirm padding.
    self.tokenizer = Tokenizer(
        '         2Foot (pound f)               1\n')
def setUp(self):
    # A minimal universal file: data sets 151 (header), 164 (units) and
    # 2411 (nodes), each wrapped in '    -1' delimiter lines.
    # FIX: raw string — the Windows path contains '\U', which is an
    # invalid \UXXXXXXXX unicode escape (SyntaxError) in a non-raw
    # Python 3 string literal; r'''...''' keeps every backslash literal.
    # NOTE(review): line breaks and fixed-width padding reconstructed from
    # the UNV record formats; the source view collapsed whitespace — confirm.
    self.buffer = r'''    -1
   151
C:\Users\oy\Documents\Projects\bitbucket\UNV\data\UNVScenario.CATAnalysis
NONE
LMS Virtual.Lab Rev 11-SL1
LMS Virtual.Lab Rev 11-SL1
17-Apr-13 19:32:03
    -1
    -1
   164
         1 SI - mks (Newton)           2
  1.00000000000000000e+00  1.00000000000000000e+00  1.00000000000000000e+00
  0.00000000000000000e+00
    -1
    -1
  2411
        67         1         1         1
    0.000000000000000e+00    0.000000000000000e+00    0.000000000000000e+00
        68         1         1         1
    0.000000000000000e+00    2.000000000000000e-02    0.000000000000000e+00
        69         1         1         1
    0.000000000000000e+00    4.000000000000000e-02    0.000000000000000e+00
    -1
'''
    self.tokenizer = Tokenizer(self.buffer)
def test_read_data_RaisesValueErrorIfDataSetEndsInTheMiddle(self):
    # Splice a premature end-of-data-set marker into the middle of the
    # 2411 buffer; read_data must reject the truncated data set.
    head, tail = self.buffer2411[:25], self.buffer2411[25:]
    corrupted = head + data_set_end + '\n' + tail
    with Tokenizer(corrupted) as tokenizer:
        data_set = get_data_set(2411, tokenizer)
        with self.assertRaises(ValueError):
            data_set.read_data()
def test_read_MultiDimensionData(self):
    # A field with dimension 5 reads five consecutive 2-wide integers.
    field = Field(int, 2, '', '', 5)
    with Tokenizer(' 1 3 5 7 9') as tokenizer:
        values = field.read(tokenizer)
        self.assertEqual(len(values), 5)
        for index, value in enumerate(values):
            self.assertEqual(value, 2 * index + 1)
def setUp(self):
    # Record 1 of Data Set 164: FORMAT(I10, 20A1, I10).
    units_code = Field(int, 10, 'units_code', 'units code')
    units_description = Field(str, 20, 'units_description',
                              'units description')
    temperature_mode = Field(int, 10, 'temperature_mode',
                             '1 - absolute, 2 - relative')
    self.fields = [units_code, units_description, temperature_mode]
    # NOTE(review): widths reconstructed from the format; the source view
    # collapsed whitespace runs — confirm padding against the writer.
    self.tokenizer = Tokenizer('         2Foot (pound f)               1\n')
def __init__(self, stream):
    '''Parse *stream* as a universal file and load all of its data sets.

    The stream is expected to start with data set 151 (file header)
    followed by data set 164 (units) — both are sanity-checked below.
    '''
    assert stream
    self.tokenizer = Tokenizer(stream)
    # list(...) replaces the redundant [x for x in ...] comprehension.
    self.dataSets = list(data_sets(self.tokenizer))
    # NOTE: asserts are stripped under -O; kept for interface
    # compatibility with existing callers that expect AssertionError.
    assert self.dataSets[0].number == 151
    assert self.dataSets[1].number == 164
def test_data_ProvidesAccessToTheReadData(self):
    with Tokenizer(self.buffer2411) as tokenizer:
        data_set = get_data_set(2411, tokenizer)
        # Three node records: labels 67..69, y-coordinates 0.00/0.02/0.04.
        self.assertEqual(len(data_set.data), 3)
        for index, record in enumerate(data_set.data):
            self.assertEqual(record.node_label, 67 + index)
            self.assertEqual(record.coordinate[1], 0.02 * index)
def test_read_data_ReadsTheWholeBuffer(self):
    with Tokenizer(self.buffer2411) as tokenizer:
        data_set = get_data_set(2411, tokenizer)
        records = data_set.read_data()
        # All three node records must be consumed from the buffer.
        self.assertEqual(len(records), 3)
        for index, record in enumerate(records):
            self.assertEqual(record.node_label, 67 + index)
            self.assertEqual(record.coordinate[1], 0.02 * index)
def test_read_data_ReadsTheBufferUntillDataSetEndIdentifier(self):
    # Two copies of the node buffer separated by an end marker: only the
    # first copy (3 records) may be consumed.
    doubled = self.buffer2411 + data_set_end + '\n' + self.buffer2411
    with Tokenizer(doubled) as tokenizer:
        data_set = get_data_set(2411, tokenizer)
        records = data_set.read_data()
        self.assertEqual(len(records), 3)
        for index, record in enumerate(records):
            self.assertEqual(record.node_label, 67 + index)
            self.assertEqual(record.coordinate[1], 0.02 * index)
def test_DataSetInitializedDataRecordsForComplexSingleDataWithArguents(self):
    # FIX: assertEqual replaces the deprecated assertEquals alias.
    with Tokenizer(open(data_folder() + '58_complex_data_with_argument.unv',
                        'r')) as tokenizer:
        dataSet = DataSet58(tokenizer)
        values = dataSet.read_definition()
        self.assertEqual(len(dataSet.data_records), 1)
        data = dataSet.read_data()
        for i in range(values.data.number_of_data_points):
            self.assertEqual(data[i].abscissa, 10.0 * (i + 1))
            self.assertEqual(data[i].value,
                             complex(1.0 * (i + 1) + 0.1,
                                     1.0 * (i + 1) + 0.2))
def test_read_HandlesNewLineCharactersInTheBuffer(self):
    # Four D25.17 floats split across two physical lines; read must
    # step over the embedded newlines.
    # NOTE(review): line break position and padding reconstructed; the
    # source view collapsed whitespace — confirm against the writer.
    buffer = '''
  0.00000000000000000e+00  2.00000000000000000e+00  4.00000000000000000e+00
  6.00000000000000000e+00'''
    fields = [Field(float, (25, 17), 'value_%i' % i, '') for i in range(4)]
    record = Record(fields)
    values = record.read(Tokenizer(buffer))
    for i in range(4):
        self.assertEqual(values['value_%i' % i], i * 2.0)
def test_read_FullDataSetThroughValuesMember(self):
    with Tokenizer(self.buffer164) as tokenizer:
        data_set = get_data_set(164, tokenizer)
        expected = [
            ('units_code', 1),
            ('units_desc', 'SI - mks (Newton)'),
            ('temperature_mode', 2),
            ('length', 1.0),
            ('force', 2.0),
            ('temperature', 3.0),
            ('temperature_offset', 4.0),
        ]
        for attribute, value in expected:
            self.assertEqual(getattr(data_set.values, attribute), value)
def test_read_FullDataSetThroughNamedRecords(self):
    with Tokenizer(self.buffer164) as tokenizer:
        dataSet = get_data_set(164, tokenizer)
        dataSet.definition_records[0].name = 'units'
        # FIX: restore the shared record name in a finally block so a
        # failing assertion cannot leak 'units' into other tests.
        try:
            self.assertEqual(dataSet.values.units.units_code, 1)
            self.assertEqual(dataSet.values.units.units_desc,
                             'SI - mks (Newton)')
            self.assertEqual(dataSet.values.units.temperature_mode, 2)
            self.assertEqual(dataSet.values.length, 1.0)
            self.assertEqual(dataSet.values.force, 2.0)
            self.assertEqual(dataSet.values.temperature, 3.0)
            self.assertEqual(dataSet.values.temperature_offset, 4.0)
        finally:
            dataSet.definition_records[0].name = None
def setUp(self):
    # Data Set 164 records: FORMAT(I10,20A1,I10) then FORMAT(3D25.17).
    # NOTE(review): fixed-width padding and line breaks reconstructed
    # from the formats; the source view collapsed whitespace — confirm.
    self.buffer164 = '''         1SI - mks (Newton)            2
  1.00000000000000000e+00  2.00000000000000000e+00  3.00000000000000000e+00
  4.00000000000000000e+00
'''
    # Data Set 2411 records: FORMAT(4I10) then FORMAT(1P3D25.16).
    self.buffer2411 = '''        67         1         1         1
    0.000000000000000e+00    0.000000000000000e+00    0.000000000000000e+00
        68         1         1         1
    0.000000000000000e+00    2.000000000000000e-02    0.000000000000000e+00
        69         1         1         1
    0.000000000000000e+00    4.000000000000000e-02    0.000000000000000e+00
'''
    self.tokenizer2411 = Tokenizer(self.buffer2411)
def test_DataSetReadsDefinition(self):
    # FIX: assertEqual replaces the deprecated assertEquals alias.
    with Tokenizer(open(data_folder() + '58_real_data_with_argument.unv',
                        'r')) as tokenizer:
        dataSet = DataSet58(tokenizer)
        self.assertEqual(dataSet.values.id_lines[1], 'Force_Time:+X')
        #...
        self.assertEqual(dataSet.values.id_lines[5], 'Default condition')
        #Record 6
        self.assertEqual(dataSet.values.dof.function_type, 1)
        self.assertEqual(dataSet.values.dof.response_entity_name,
                         'Force_Time')
        #Record 7
        self.assertEqual(dataSet.values.data.ordinate_data_type, 2)
        self.assertEqual(dataSet.values.data.number_of_data_points, 10)
        #Record 8
        self.assertEqual(dataSet.values.abscissa.data_type, 17)
        self.assertEqual(dataSet.values.abscissa.axis_units_label, 's')
        #Record 9
        self.assertEqual(dataSet.values.ordinate_numerator.data_type, 13)
        self.assertEqual(dataSet.values.ordinate_numerator.axis_units_label,
                         'N')
        #Record 10
        self.assertEqual(dataSet.values.ordinate_denominator.data_type, 0)
        self.assertEqual(dataSet.values.ordinate_denominator.axis_units_label,
                         'NONE')
        #Record 11
        self.assertEqual(dataSet.values.z_axis.data_type, 0)
        self.assertEqual(dataSet.values.z_axis.axis_units_label, 'NONE')
def test_read_RaisesValueErrorIfNotEnoughValueFoundForMultiDimensionData(self):
    # Seven values requested, only five present in the stream.
    field = Field(int, 2, '', '', 7)
    with Tokenizer(' 1 3 5 7 9') as tokenizer:
        self.assertRaises(ValueError, field.read, tokenizer)
def test_read_data_RaisesValueErrorIfStreamEndsInTheMiddle(self):
    # Truncate the buffer mid-record; read_data must not return partial data.
    truncated = self.buffer2411[:25]
    with Tokenizer(truncated) as tokenizer:
        data_set = get_data_set(2411, tokenizer)
        with self.assertRaises(ValueError):
            data_set.read_data()
def test_read_HandlesComplex(self):
    # A complex field consumes two consecutive D25.17 numbers
    # (real part, then imaginary part).
    # NOTE(review): 25-column padding reconstructed; whitespace was
    # collapsed in the source view.
    field = Field(complex, (25, 17), '', '')
    stream = '  1.00000000000000000e+01  1.00000000000000000e+02'
    with Tokenizer(stream) as tokenizer:
        self.assertEqual(field.read(tokenizer), complex(10.0, 100.0))
def test_read_HandlesFloat(self):
    # NOTE(review): 25-column padding reconstructed; whitespace was
    # collapsed in the source view.
    field = Field(float, (25, 17), '', '')
    with Tokenizer('  1.00000000000000000e+01') as tokenizer:
        self.assertEqual(field.read(tokenizer), 10.0)
def test_read_HandlesInt(self):
    # NOTE(review): 10-column right-justified padding reconstructed;
    # whitespace was collapsed in the source view.
    field = Field(int, 10, '', '')
    with Tokenizer('         5') as tokenizer:
        self.assertEqual(field.read(tokenizer), 5)
def test_write_FullDataSet(self):
    with Tokenizer(self.buffer164) as tokenizer:
        data_set = get_data_set(164, tokenizer)
        data_set.read_definition()
        # A freshly read data set must round-trip to its own source text.
        self.assertEqual(self.buffer164, data_set.write())
def test_get_data_set_number_RaisesValueErrorForNonNumbers(self):
    # FIX: construct the tokenizer OUTSIDE the assertRaises context so
    # the expected ValueError can only come from get_data_set_number
    # itself, not from Tokenizer construction.
    tokenizer = Tokenizer(' a151\n -1\n')
    with self.assertRaises(ValueError):
        get_data_set_number(tokenizer)
def test_get_data_set_number_ReturnsTheDataSetNumber(self):
    stream = Tokenizer(' 151\n -1\n')
    number = get_data_set_number(stream)
    self.assertEqual(number, 151)
def test_skip_RaisesValueErrorIfEndMarkerIsNotFound(self):
    # The stream contains junk and a malformed end marker, so skip()
    # runs out of input before finding a well-formed data-set end.
    buffer = ' asome thjext sdflsk lk; sd;l kasdf\n \n -1\nNewText'
    with Tokenizer(buffer) as tokenizer:
        data_set = DataSet(0, [], [], tokenizer)
        self.assertRaises(ValueError, data_set.skip)
def test_DataSetIsStartedWithEmptyDataRecords(self):
    # FIX: assertEqual replaces the deprecated assertEquals alias.
    with Tokenizer('') as tokenizer:
        dataSet = DataSet58(tokenizer)
        self.assertEqual(len(dataSet.definition_records), 11)
        self.assertEqual(len(dataSet.data_records), 0)
def test_read_RaisesValueErrorIfDataSetSeperatorWithinFoundForMultiDimensionData(self):
    # A data-set separator interrupts the stream before all seven
    # requested values have been read.
    field = Field(int, 2, '', '', 7)
    interrupted = ' 1 3 5\n' + dataset_marker + '\n 7 9'
    with Tokenizer(interrupted) as tokenizer:
        with self.assertRaises(ValueError):
            field.read(tokenizer)
class TestRecord(unittest.TestCase):
    '''Tests for Record reading/writing fixed-width UNV record lines.'''

    def setUp(self):
        # These fields are from Record 1 of Data Set 164,
        # layout FORMAT(I10, 20A1, I10).
        self.fields = [
            Field(int, 10, 'units_code', 'units code'),
            Field(str, 20, 'units_description', 'units description'),
            Field(int, 10, 'temperature_mode', '1 - absolute, 2 - relative'),
        ]
        # NOTE(review): fixed-width padding reconstructed from the format;
        # the source view collapsed whitespace runs — confirm.
        self.tokenizer = Tokenizer(
            '         2Foot (pound f)               1\n')

    def tearDown(self):
        pass

    #Defaults
    def test_defaults_returns_default_values_for_the_fields(self):
        record = Record(self.fields)
        values = record.defaults()
        self.assertEqual(int(), values.units_code)

    #Read
    def test_read_ValuesCanBeAccessedByName(self):
        record = Record(self.fields)
        values = record.read(self.tokenizer)
        self.assertEqual(values.units_code, 2)
        self.assertEqual(values.units_description, 'Foot (pound f)')
        self.assertEqual(values.temperature_mode, 1)

    def test_read_HandlesNewLineCharactersInTheBuffer(self):
        buffer = '''
  0.00000000000000000e+00  2.00000000000000000e+00  4.00000000000000000e+00
  6.00000000000000000e+00'''
        fields = [Field(float, (25, 17), 'value_%i' % i, '')
                  for i in range(4)]
        record = Record(fields)
        values = record.read(Tokenizer(buffer))
        for i in range(4):
            self.assertEqual(values['value_%i' % i], i * 2.0)

    ##Write
    def test_write_AcceptValuesAsNamedParameters(self):
        record = Record(self.fields)
        buffer = record.write(units_code=2,
                              units_description='Foot (pound f)',
                              temperature_mode=1)
        self.assertEqual(buffer, self.tokenizer.read_all())

    def test_write_AcceptValuesAsDictionary(self):
        record = Record(self.fields)
        buffer = record.write({
            'units_code': 2,
            'units_description': 'Foot (pound f)',
            'temperature_mode': 1,
        })
        self.assertEqual(buffer, self.tokenizer.read_all())

    def test_write_HandlesNewLineCharactersInTheBuffer(self):
        buffer = '''  0.00000000000000000e+00  2.00000000000000000e+00  4.00000000000000000e+00
  6.00000000000000000e+00\n'''
        fields = []
        values = {}
        for i in range(4):
            fields.append(Field(float, (25, 17), 'value_%i' % i, ''))
            values['value_%i' % i] = i * 2.0
        record = Record(fields)
        self.assertEqual(buffer, record.write(values))

    def test_writes_two_max_length_strings_in_two_lines(self):
        fields = []
        values = {}
        for i in range(2):
            fields.append(Field(str, 80, 'value_%i' % i, ''))
            values['value_%i' % i] = 'a'
        record = Record(fields)
        buffer = 'a' + ' ' * 79 + '\n' + 'a' + ' ' * 79 + '\n'
        self.assertEqual(buffer, record.write(values))
def test_skip_GoesTillTheBeginningOfNextDataSet(self):
    buffer = ' asome thjext sdflsk lk; sd;l kasdf\n -1\nNewText'
    with Tokenizer(buffer) as tokenizer:
        data_set = DataSet(0, [], [], tokenizer)
        data_set.skip()
        # Everything up to and including the end marker is consumed.
        self.assertEqual(tokenizer.read_line(), 'NewText')
def test_read_HandlesString(self):
    # A 10-wide string field; surrounding padding is stripped on read.
    # NOTE(review): padding reconstructed; whitespace was collapsed in
    # the source view.
    field = Field(str, 10, '', '')
    with Tokenizer('         5') as tokenizer:
        self.assertEqual(field.read(tokenizer), '5')
class TestRecord(unittest.TestCase):
    '''Record round-trip tests: fixed-width read and write.'''

    def setUp(self):
        # Record 1 of Data Set 164: FORMAT(I10, 20A1, I10).
        units_code = Field(int, 10, 'units_code', 'units code')
        units_description = Field(str, 20, 'units_description',
                                  'units description')
        temperature_mode = Field(int, 10, 'temperature_mode',
                                 '1 - absolute, 2 - relative')
        self.fields = [units_code, units_description, temperature_mode]
        # NOTE(review): padding reconstructed from the format; whitespace
        # runs were collapsed in the source view — confirm.
        self.tokenizer = Tokenizer(
            '         2Foot (pound f)               1\n')

    def tearDown(self):
        pass

    #Defaults
    def test_defaults_returns_default_values_for_the_fields(self):
        record = Record(self.fields)
        defaults = record.defaults()
        self.assertEqual(int(), defaults.units_code)

    #Read
    def test_read_ValuesCanBeAccessedByName(self):
        record = Record(self.fields)
        parsed = record.read(self.tokenizer)
        self.assertEqual(parsed.units_code, 2)
        self.assertEqual(parsed.units_description, 'Foot (pound f)')
        self.assertEqual(parsed.temperature_mode, 1)

    def test_read_HandlesNewLineCharactersInTheBuffer(self):
        source = '''
  0.00000000000000000e+00  2.00000000000000000e+00  4.00000000000000000e+00
  6.00000000000000000e+00'''
        value_fields = [Field(float, (25, 17), 'value_%i' % i, '')
                        for i in range(4)]
        record = Record(value_fields)
        parsed = record.read(Tokenizer(source))
        for i in range(4):
            self.assertEqual(parsed['value_%i' % i], i * 2.0)

    ##Write
    def test_write_AcceptValuesAsNamedParameters(self):
        record = Record(self.fields)
        written = record.write(units_code=2,
                               units_description='Foot (pound f)',
                               temperature_mode=1)
        self.assertEqual(written, self.tokenizer.read_all())

    def test_write_AcceptValuesAsDictionary(self):
        record = Record(self.fields)
        written = record.write({
            'units_code': 2,
            'units_description': 'Foot (pound f)',
            'temperature_mode': 1,
        })
        self.assertEqual(written, self.tokenizer.read_all())

    def test_write_HandlesNewLineCharactersInTheBuffer(self):
        expected = '''  0.00000000000000000e+00  2.00000000000000000e+00  4.00000000000000000e+00
  6.00000000000000000e+00\n'''
        value_fields = []
        values = {}
        for i in range(4):
            value_fields.append(Field(float, (25, 17), 'value_%i' % i, ''))
            values['value_%i' % i] = i * 2.0
        record = Record(value_fields)
        self.assertEqual(expected, record.write(values))

    def test_writes_two_max_length_strings_in_two_lines(self):
        string_fields = []
        values = {}
        for i in range(2):
            string_fields.append(Field(str, 80, 'value_%i' % i, ''))
            values['value_%i' % i] = 'a'
        record = Record(string_fields)
        expected = 'a' + ' ' * 79 + '\n' + 'a' + ' ' * 79 + '\n'
        self.assertEqual(expected, record.write(values))