def __init__(self, convertible_to):
  """Initialize the UnitsParser.

  Args:
    convertible_to: Either an individual unit specification or a series of
        unit specifications, where each unit specification is either a
        string (e.g. 'byte') or a units.Unit. The parser input must be
        convertible to at least one of the specified Units, or the parse()
        method will raise a ValueError.
  """
  # A lone string or Unit is wrapped so both forms flow through the same
  # normalization below. (`basestring` implies Python 2 here.)
  is_single_spec = isinstance(convertible_to, (basestring, units.Unit))
  specs = [convertible_to] if is_single_spec else convertible_to
  self.convertible_to = [units.Unit(u) for u in specs]
def testNewUnitRegistry(self):
  """Pickled Quantities must survive a UnitRegistry replacement.

  Pint Quantities only interoperate when they point at the same
  UnitRegistry object, and closing and reopening PKB creates a new
  registry. Simulate that restart by swapping in a fresh registry
  between pickling and unpickling.
  """
  pickled = pickle.dumps(1.0 * units.Unit('second'))
  units._UNIT_REGISTRY = units._UnitRegistry()
  restored = pickle.loads(pickled)
  new_second = 1.0 * units.Unit('second')
  self.assertEqual(restored, new_second)
  # Converting proves 'restored' lives in the same Pint "universe" as
  # new_second, because we can convert it to new_second's units.
  restored.to(new_second)
def testCustomBlocksize(self):
  """A custom blocksize is emitted and does not mutate global SCENARIOS."""
  orig_blocksize = fio_benchmark.SCENARIOS['sequential_write']['blocksize']

  # NOTE(review): use the same argument shape as the other
  # GenerateJobFileString call in this file (io_depths, num_jobs,
  # working_set_size, block_size, runtime, direct, parameters). The
  # original call passed only 5 arguments, which put the blocksize in
  # the working_set_size position and omitted num_jobs entirely.
  job_file = fio_benchmark.GenerateJobFileString(
      self.filename, ['sequential_read'], [1], [1], None,
      units.Unit('megabyte') * 2, 600, False, {})
  self.assertIn('blocksize=2000000B', job_file)

  # Test that generating a job file doesn't modify the global
  # SCENARIOS variable.
  self.assertEqual(fio_benchmark.SCENARIOS['sequential_write']['blocksize'],
                   orig_blocksize)
def testConvertibleToSeries(self):
  """A parser built from several units accepts input convertible to any."""
  parser = flag_util.UnitsParser(convertible_to=(units.byte, 'second'))
  self.assertEqual(parser.parse('10 MB'), 10 * units.Unit('megabyte'))
  self.assertEqual(parser.parse('10 minutes'), 10 * units.Unit('minute'))
  # A length converts to neither bytes nor seconds, so parsing fails.
  with self.assertRaises(ValueError):
    parser.parse('1 meter')
def testParseExplicitMemory(self):
  """An explicit binary-prefixed size keeps its magnitude and units."""
  quantity = self._parser.Parse('30 GiB')
  self.assertEqual(quantity.magnitude, 30)
  self.assertEqual(quantity.units, units.Unit('gibibyte'))
def testIndirectIO(self):
  """Requesting buffered I/O (direct=False) must emit 'direct=0'."""
  rendered = fio_benchmark.GenerateJobFileString(
      self.filename, ['sequential_read'], [1], [1], None,
      units.Unit('megabyte') * 2, 600, False, {})
  self.assertIn('direct=0', rendered)
def testSameUnitRegistry(self):
  """A Quantity round-trips through pickle within one UnitRegistry."""
  original = 1.0 * units.Unit('second')
  restored = pickle.loads(pickle.dumps(original))
  self.assertEqual(original, restored)
def testUnitNotEqual(self):
  """Unit.__ne__ must be the negation of __eq__ for equal units.

  Regression check for https://github.com/hgrecco/pint/issues/372.
  """
  # Deliberately exercises the '!=' operator (not assertEqual, which
  # would only go through '==').
  differs = units.byte != units.Unit('byte')
  self.assertFalse(differs)