def parseUnit(units, conversions, tokens, overwrite=False):
	"""
	Convert the next series of tokens into a unit
	@param units: a map of unit symbols to unit objects to be modified
	@param conversions: a map of unit symbols to scale factors to be modified
	@param tokens: a list of tokens
	@param overwrite: whether redefining an existing unit is permitted
	"""
	# Every definition begins with the unit's symbol
	sym = UC_Utils.parseSymbol(tokens)
	if (sym in units) and not overwrite:
		raise UC_Common.FileFormatError(
			f"Duplicate definition of unit '{sym}'")
	dependencies = {}
	delimiter = UC_Utils.getNextToken(tokens)
	if delimiter == UC_Common.MAP_DELIMITER:
		# Derived unit: "<sym> : <scale> , <base unit list> ;"
		scaleFactor = UC_Utils.parseFloat(tokens)
		UC_Utils.getNextToken(tokens, UC_Common.SEP_DELIMITER)
		dependencies = parseBaseUnitMap(tokens)
		conversions[sym] = scaleFactor
	elif delimiter != UC_Common.END_DELIMITER:
		# Base units must be terminated immediately
		raise UC_Common.FileFormatError(
			f"Expected delimiter; received '{delimiter}'")
	# Create unit
	units[sym] = UC_Unit.Unit(sym, dependencies)
def tokenize(lines=None):
	"""
	Generate a list of tokens from a list of strings
	@param lines: the list of strings from which to generate tokens
		(defaults to no lines)
	@return a list of tokens
	"""
	# Default changed from a shared mutable default argument ([]) to None
	if lines is None:
		lines = []
	tokens = []
	for line in lines:
		token = ''
		for char in line:
			# Stop processing line if there is a comment. The trailing
			# flush below emits the current token exactly once; the old
			# code also appended it here, duplicating the token that
			# immediately preceded a comment delimiter.
			if char == UC_Common.COMMENT_DELIMITER:
				break
			# Handle whitespace: terminates the current token
			if UC_Utils.isWhitespace(char):
				if token:
					tokens.append(token)
				token = ''
			# Handle delimiters: terminate the current token and are
			# single-character tokens themselves
			elif UC_Utils.isDelimiter(char):
				if token:
					tokens.append(token)
				token = ''
				tokens.append(char)
			# Build token
			else:
				token += char
		# Flush whatever token was in progress at end-of-line or comment
		if token:
			tokens.append(token)
	return tokens
def tokenize(line):
	"""
	Convert a string into a list of tokens
	@param line: the string to convert
	@return a list of tokens
	"""
	result = []
	current = ''
	floatState = 0
	for char in line:
		# Give the float tokenizer first crack at every character so
		# numeric literals (signs, decimal points) are grouped correctly
		current, floatState = tokenizeFloat(current, result, char,
		                                    floatState)
		if floatState:
			# Still inside a float-like token
			current += char
			continue
		if UC_Utils.isWhitespace(char):
			# Whitespace ends the current token
			if current:
				result.append(current)
			current = ""
		elif UC_Utils.isSpecialChar(char):
			# Special characters end the current token and stand alone
			if current:
				result.append(current)
			current = ""
			result.append(char)
		else:
			current += char
	# Flush any pending float state, then any residual token
	current, floatState = tokenizeFloat(current, result, "", floatState)
	if current:
		result.append(current)
	return result
def convertToRPN(tokens):
	"""
	Convert infix notation into Reverse Polish Notation for building the AST
	@param tokens: the list of tokens in infix notation order
	@return the tokens reordered into RPN
	@raise UC_Common.UnitError if parentheses are mismatched
	@post: this implementation uses the Shunting-yard algorithm
	"""
	outputQueue = []
	operatorStack = []
	for token in tokens:
		if token == UC_Common.BRACKET_OPEN:
			operatorStack.append(token)
		elif token == UC_Common.BRACKET_SHUT:
			# Pop operators until the matching open bracket
			while operatorStack and operatorStack[-1] != UC_Common.BRACKET_OPEN:
				outputQueue.append(operatorStack.pop())
			# An empty stack means there was no matching open bracket;
			# the empty-stack guard avoids an IndexError from pop(), and
			# the exception class is UC_Common.UnitError (the original
			# referenced an undefined name 'UC_CommonUnitError')
			if not operatorStack or operatorStack.pop() != UC_Common.BRACKET_OPEN:
				raise UC_Common.UnitError(
					f"Detected mismatched parentheses: '{UC_Common.BRACKET_SHUT}'"
				)
		elif UC_Utils.isOperator(token):
			# Pop higher-precedence operators before pushing this one
			while (operatorStack
			       and operatorStack[-1] != UC_Common.BRACKET_OPEN
			       and UC_Utils.hasHigherPrecedence(operatorStack[-1], token)):
				outputQueue.append(operatorStack.pop())
			operatorStack.append(token)
		else:
			# Operands go straight to the output
			outputQueue.append(token)
	# Drain remaining operators; a leftover open bracket is unmatched
	while operatorStack:
		if operatorStack[-1] == UC_Common.BRACKET_OPEN:
			raise UC_Common.UnitError(
				f"Detected mismatched parentheses: '{UC_Common.BRACKET_OPEN}'")
		outputQueue.append(operatorStack.pop())
	return outputQueue
def aggregateSignHelper(tokens, updatedTokens):
	# Recursively rewrite unary '+'/'-' signs in a token stream so every
	# sign is attached to a value, consuming `tokens` and appending the
	# rewritten stream to `updatedTokens`.
	# @param tokens: the token list to consume (mutated: tokens are popped)
	# @param updatedTokens: the output list (mutated in place)
	# @return updatedTokens when a closing bracket ends this recursion
	#         level; None (implicitly) when the stream is exhausted
	while tokens:
		token = tokens.pop(0)
		if token == UC_Common.BRACKET_OPEN:
			# Recurse into the bracketed subexpression
			updatedTokens.append(token)
			aggregateSignHelper(tokens, updatedTokens)
		elif token == UC_Common.BRACKET_SHUT:
			# End of this recursion level
			updatedTokens.append(token)
			return updatedTokens
		elif ((token == UC_Common.OPERATOR_ADD
		       or token == UC_Common.OPERATOR_SUB)
		      and (not updatedTokens
		           or UC_Utils.isSpecialChar(updatedTokens[-1]))):
			# The '+'/'-' is unary: it starts the stream or follows a
			# special character rather than a value
			if tokens and UC_Utils.isFloat(tokens[0]):
				# Fold the sign directly into the following numeric literal
				updatedTokens.append(f"{token}{tokens.pop(0)}")
			elif not updatedTokens or updatedTokens[
					-1] != UC_Common.BRACKET_SHUT:
				# Rewrite the sign as '(<sign>1 *' ... ')' wrapping the
				# rest of this level so it multiplies the whole operand
				updatedTokens.extend([
					UC_Common.BRACKET_OPEN, f"{token}1", UC_Common.OPERATOR_MUL
				])
				aggregateSignHelper(tokens, updatedTokens)
				updatedTokens.append(UC_Common.BRACKET_SHUT)
			else:
				updatedTokens.append(token)
		else:
			updatedTokens.append(token)
def parsePrefixMapping(prefixes, tokens, base, overwrite=False):
	"""
	Convert the next series of tokens into a prefix-exponent pair
	@param prefixes: the prefix-exponent map to modify
	@param tokens: a list of tokens
	@param base: the base for the exponent
	@param overwrite: whether redefining an existing prefix is permitted
	"""
	prefix = UC_Utils.getNextToken(tokens)
	alreadyDefined = prefix in prefixes
	if alreadyDefined and not overwrite:
		raise UC_Common.FileFormatError(
			f"Duplicate definition of prefix '{prefix}'")
	# Store the (base, exponent) pair; the exponent is kept as a Decimal
	exponent = Decimal(UC_Utils.parseInt(tokens))
	prefixes[prefix] = (base, exponent)
def delUnit(self, symToDelete):
	"""
	Delete a unit and every unit that (transitively) depends on it
	@param symToDelete: the symbol of the unit to delete
	@return the set of unit symbols that were deleted
	@raise UC_Common.UnitError if the symbol does not name a deletable unit
	"""
	if symToDelete in self.units:
		unitsToDelete = {symToDelete}
		# Find all units to delete: iterate to a fixed point, marking any
		# unit whose base units include a unit already marked
		foundDependentUnit = True
		while foundDependentUnit:
			foundDependentUnit = False
			for sym, unit in self.units.items():
				if sym in unitsToDelete:
					continue
				for dependencySym in unit.baseUnits.keys():
					prefix, baseSym = UC_Utils.stripPrefix(
						self.units, dependencySym)
					if baseSym in unitsToDelete:
						unitsToDelete.add(sym)
						foundDependentUnit = True
						break
		# Delete all units which need to be deleted
		for sym in unitsToDelete:
			del self.units[sym]
			if sym in self.conversions:
				del self.conversions[sym]
		return unitsToDelete
	else:
		# Distinguish "unknown symbol" from "prefixed form of a known unit"
		try:
			unitDefStr = self.getUnitDefinitionStr(symToDelete)
		except Exception:
			# Narrowed from a bare 'except:' so SystemExit and
			# KeyboardInterrupt are no longer swallowed; 'from None'
			# drops the misleading chained traceback
			raise UC_Common.UnitError(
				f"Cannot delete '{symToDelete}' - unit does not exist"
			) from None
		raise UC_Common.UnitError(
			f"Cannot delete '{symToDelete}' - unit contains a prefix: {unitDefStr}"
		)
def delPrefix(self, symToDelete):
	"""
	Delete a prefix and every unit that (transitively) depends on a unit
	carrying that prefix
	@param symToDelete: the prefix symbol to delete
	@return the set of unit symbols that were deleted
	@raise UC_Common.UnitError if the prefix does not exist
	"""
	if symToDelete not in self.prefixes:
		raise UC_Common.UnitError(
			f"Cannot delete '{symToDelete}' - prefix does not exist")
	doomed = set()
	# Grow the doomed set to a fixed point: a unit is doomed when one of
	# its base units uses the deleted prefix or is itself doomed
	changed = True
	while changed:
		changed = False
		for sym, unit in self.units.items():
			if sym in doomed:
				continue
			for dependencySym in unit.baseUnits.keys():
				prefix, baseSym = UC_Utils.stripPrefix(
					self.units, dependencySym)
				if prefix == symToDelete or baseSym in doomed:
					doomed.add(sym)
					changed = True
					break
	# Remove the doomed units along with their conversion factors
	for sym in doomed:
		del self.units[sym]
		self.conversions.pop(sym, None)
	# Delete from prefix map
	del self.prefixes[symToDelete]
	return doomed
def getUnitDefinitionStr(self, string):
	"""
	Build a human-readable definition string for a (possibly prefixed) unit
	@param string: the unit symbol, optionally carrying a prefix
	@return a string of the form "1 <string> = ..."
	"""
	prefix, sym = UC_Utils.stripPrefix(self.units, string)
	if not prefix:
		# Unprefixed: report the conversion factor and base-unit expansion
		quantity = self.conversions.get(sym, 1)
		return f"1 {string} = {quantity} {self.units[sym].__str__(True)}"
	# Prefixed: report the prefix, the bare unit, and the scale factor
	scaleFactor = self.getPrefixScaleFactor(prefix)
	return f"1 {string} = prefix: '{prefix}', unit: '{sym}' = {scaleFactor} {sym}"
def addPrefix(self, sym, base, exp):
	"""
	Register a new prefix after validating its symbol and dependencies
	@param sym: the prefix symbol
	@param base: the base of the prefix's scale factor
	@param exp: the exponent of the prefix's scale factor
	@raise UC_Common.UnitError if the symbol is invalid or already defined
	"""
	if not UC_Utils.isValidSymbol(sym):
		raise UC_Common.UnitError(
			f"Invalid prefix '{sym}': valid prefix symbols are composed of alphabetical characters and underscores"
		)
	if sym in self.prefixes:
		# Report the existing definition in the error message
		base, exp = self.prefixes[sym]
		raise UC_Common.UnitError(
			f"Prefix '{sym}' already exists: '{sym}' = {base}^{exp} = {base**exp}"
		)
	# Try adding the prefix to a copy first so a failed validation
	# leaves this object unchanged
	candidate = self.prefixes.copy()
	candidate[sym] = (base, exp)
	# Check that all dependencies exist and check for an acyclic dependency graph
	UC_Utils.validate(self.units, self.conversions, candidate)
	self.prefixes = candidate
def loadFile(filename, units, conversions, prefixes, overwrite):
	"""
	Read a file and generate maps of units, conversions, and prefixes
	@param filename: the name of the file to load
	@param units: a map of unit symbols to unit objects
	@param conversions: a map of derived unit symbols to scale factors
	@param prefixes: a map of prefixes to exponents
	@param overwrite: whether redefining existing entries is permitted
	"""
	# Generate a list of tokens from the file; 'with' guarantees the file
	# handle is closed even if readlines/tokenize raises (the original
	# open/close pair leaked the handle on error)
	with open(filename, 'r') as file:
		tokens = tokenize(file.readlines())
	# Parse tokens to generate maps
	UC_FileParser.parseFile(tokens, units, conversions, prefixes, overwrite)
	# Check that all dependencies exist and check for an acyclic dependency graph
	UC_Utils.validate(units, conversions, prefixes)
def handleParseExpDecrement(tokens, unitTokens, parsingExp):
	"""
	Finish parsing an exponent and inject multiplication between adjacent
	units where needed
	@param tokens: the remaining input tokens
	@param unitTokens: the unit tokens collected so far (mutated in place)
	@param parsingExp: the current exponent-parsing depth
	@return the updated exponent-parsing depth
	"""
	# Only act when the exponent being parsed has just completed
	if parsingExp != 1:
		return parsingExp
	if tokens:
		upcoming = tokens[0]
		if upcoming == UC_Common.OPERATOR_MUL or upcoming == UC_Common.OPERATOR_DIV:
			# An explicit operator follows; move it into the unit tokens
			unitTokens.append(tokens.pop(0))
		elif UC_Utils.isValidSymbol(upcoming):
			# Two units are adjacent; insert an implicit multiplication
			unitTokens.append(UC_Common.OPERATOR_MUL)
	return 0
def handleAppendUnitSymbol(tokens, unitTokens, parsingExp):
	"""
	Decide how parsing proceeds after a unit symbol has been appended
	@param tokens: the remaining input tokens
	@param unitTokens: the unit tokens collected so far (mutated in place)
	@param parsingExp: the current exponent-parsing depth
	@return the updated exponent-parsing depth
	"""
	if not tokens:
		return parsingExp
	upcoming = tokens[0]
	if upcoming == UC_Common.OPERATOR_EXP:
		# An exponent follows the symbol; start parsing it
		unitTokens.append(tokens.pop(0))
		return 1
	if upcoming == UC_Common.OPERATOR_MUL or upcoming == UC_Common.OPERATOR_DIV:
		# Explicit operator between units; move it into the unit tokens
		unitTokens.append(tokens.pop(0))
	elif UC_Utils.isValidSymbol(upcoming):
		# Two units are adjacent; insert an implicit multiplication
		unitTokens.append(UC_Common.OPERATOR_MUL)
	return parsingExp
def aggregateQuantities(tokens):
	"""
	Combine tokens which constitute a quantity
	@param tokens: a list of tokens (consumed)
	@return a list where each quantity is a (value, unit-token-list) tuple
	@raise UC_Common.UnitError if a value is missing where one is required
	"""
	aggregatedTokens = []
	needsValue = True
	while tokens:
		if UC_Utils.isOperator(tokens[0]):
			if needsValue:
				raise UC_Common.UnitError(
					f"Expected float; received '{tokens[0]}'")
			aggregatedTokens.append(tokens.pop(0))
			needsValue = True
		elif UC_Utils.isSpecialChar(tokens[0]):
			aggregatedTokens.append(tokens.pop(0))
		else:
			needsValue = False
			# Get value; '1' is the implicit quantity when no number is given
			quantity = '1'
			try:
				float(tokens[0])
				quantity = tokens.pop(0)
			except (ValueError, TypeError):
				# Narrowed from a bare 'except:': tokens[0] is either a
				# non-numeric string (ValueError) or an aggregated unit
				# token list (TypeError)
				# Inject multiplication where needed
				if aggregatedTokens and aggregatedTokens[
						-1] == UC_Common.BRACKET_SHUT:
					aggregatedTokens.append(UC_Common.OPERATOR_MUL)
			# Get unit
			unit = []
			if tokens and isinstance(tokens[0], list):
				unit = tokens.pop(0)
			aggregatedTokens.append((quantity, unit))
	if needsValue and aggregatedTokens:
		# Dropped the stray 'f' prefix: this message has no placeholders
		raise UC_Common.UnitError("Expected float; no tokens received")
	return aggregatedTokens
def addUnit(self, sym, scaleFactor, unit):
	"""
	Register a new unit after validating its symbol and dependencies
	@param sym: the unit symbol
	@param scaleFactor: the scale factor relative to the unit's base units
	@param unit: the unit object whose reduction defines the base units
	@raise UC_Common.UnitError if the symbol is invalid or already defined
	"""
	if not UC_Utils.isValidSymbol(sym):
		raise UC_Common.UnitError(
			f"Invalid symbol '{sym}': valid unit symbols are composed of alphabetical characters and underscores"
		)
	if sym in self.units:
		# Removed an unused 'quantity' lookup here; the definition string
		# below already reports the conversion factor
		raise UC_Common.UnitError(
			f"Unit '{sym}' already exists: {self.getUnitDefinitionStr(sym)}"
		)
	else:
		# Try adding the unit to copies of the maps so a failed
		# validation leaves this object unchanged
		units = self.units.copy()
		units[sym] = UC_Unit.Unit(sym, unit.reduce())
		conversions = self.conversions.copy()
		conversions[sym] = scaleFactor
		# Check that all dependencies exist and check for an acyclic dependency graph
		UC_Utils.validate(units, conversions, self.prefixes)
		self.units = units
		self.conversions = conversions
def parsePrefix(prefixes, tokens, overwrite=False):
	"""
	Convert the next series of tokens into prefix-exponent pairs
	@param prefixes: the prefix-exponent map to modify
	@param tokens: a list of tokens
	@param overwrite: whether redefining an existing prefix is permitted
	"""
	# A prefix block is "<base> : <prefix> <exp> [, <prefix> <exp>]* ;"
	base = UC_Utils.parseFloat(tokens)
	UC_Utils.getNextToken(tokens, UC_Common.MAP_DELIMITER)
	while True:
		parsePrefixMapping(prefixes, tokens, base, overwrite)
		if UC_Utils.peekNextToken(tokens) != UC_Common.SEP_DELIMITER:
			break
		# Consume the separator and parse the next mapping
		UC_Utils.getNextToken(tokens)
	UC_Utils.getNextToken(tokens, UC_Common.END_DELIMITER)
def parseBaseUnitMap(tokens):
	"""
	Convert the next series of tokens into a map of units to exponents
	@param tokens: a list of tokens
	@return pairs of units and their corresponding exponents
	"""
	baseUnitMap = {}
	while True:
		# Parse one "<symbol> <exponent>" pair; repeated symbols
		# accumulate their exponents
		baseSym = UC_Utils.parseSymbol(tokens)
		baseUnitMap[baseSym] = baseUnitMap.get(baseSym,
		                                       0) + UC_Utils.parseInt(tokens)
		if UC_Utils.peekNextToken(tokens) != UC_Common.SEP_DELIMITER:
			break
		# Consume the separator and parse the next pair
		UC_Utils.getNextToken(tokens)
	UC_Utils.getNextToken(tokens, UC_Common.END_DELIMITER)
	return baseUnitMap
def parseFile(tokens, units, conversions, prefixes, overwrite=False):
	"""
	Convert a list of tokens into maps of units, conversions, and prefixes
	@param tokens: a list of tokens
	@param units: a map of unit symbols to unit objects
	@param conversions: a map of derived unit symbols to scale factors
	@param prefixes: a map of prefixes to exponents
	@param overwrite: whether redefining existing entries is permitted
	@return the (units, conversions, prefixes) maps
	"""
	while tokens:
		# A definition opening with a symbol is a unit definition;
		# anything else is a prefix block
		if UC_Utils.isValidSymbol(tokens[0]):
			parseUnit(units, conversions, tokens, overwrite)
		else:
			parsePrefix(prefixes, tokens, overwrite)
	# Return result of parsing
	return units, conversions, prefixes
def appendUnitTokens(aggregatedTokens, unitTokens, token=None):
	"""
	Flush collected unit tokens into the aggregated token list
	@param aggregatedTokens: the output token list (mutated in place)
	@param unitTokens: the unit tokens to flush
	@param token: an optional trailing token to append after the flush
	@return a fresh empty list for collecting the next unit's tokens
	"""
	if unitTokens:
		trailing = unitTokens[-1]
		if trailing in (UC_Common.OPERATOR_MUL, UC_Common.OPERATOR_DIV):
			# A dangling operator belongs after the unit, not inside it
			unitTokens.pop()
			aggregatedTokens.append(unitTokens)
			aggregatedTokens.append(trailing)
		else:
			aggregatedTokens.append(unitTokens)
	if token is not None:
		# Inject an implicit multiplication before an open bracket that
		# directly follows a value or unit
		if (aggregatedTokens
		        and not UC_Utils.isSpecialChar(aggregatedTokens[-1])
		        and token == UC_Common.BRACKET_OPEN):
			aggregatedTokens.append(UC_Common.OPERATOR_MUL)
		aggregatedTokens.append(token)
	return []
def processPrefixes(self, units):
	"""
	Strip prefixes from a unit-exponent map, folding the prefixes' scale
	factors into a single multiplier
	@param units: a map of (possibly prefixed) unit symbols to exponents,
		rewritten in place to use unprefixed symbols
	@return the combined scale factor contributed by the removed prefixes
	@raise UC_Common.UnitError if a prefix is not recognized
	"""
	scaleFactor = Decimal(1)
	renames = {}
	for prefixedSym, exp in units.items():
		# Split the symbol into its prefix and base unit
		prefix, bareSym = UC_Utils.stripPrefix(self.units, prefixedSym)
		if not prefix:
			continue
		if prefix not in self.prefixes:
			raise UC_Common.UnitError(f"Unknown unit: '{prefixedSym}'")
		renames[prefixedSym] = bareSym
		# Each prefix contributes its scale factor raised to the exponent
		scaleFactor *= self.getPrefixScaleFactor(prefix)**(exp)
	# Fold the prefixed entries into their unprefixed counterparts
	for fromSym, toSym in renames.items():
		units[toSym] = units.get(toSym, 0) + units[fromSym]
		units.pop(fromSym)
	return scaleFactor
def convert(self, srcUnit, dstUnit):
	# Compute the multiplicative factor that converts a quantity expressed
	# in srcUnit into the equivalent quantity expressed in dstUnit.
	# @param srcUnit: the source unit object
	# @param dstUnit: the destination unit object
	# @return the scale factor
	# @raise UC_Common.UnitError if the units are not compatible
	srcUnits = srcUnit.reduce()
	dstUnits = dstUnit.reduce()
	# Reduce all units to irreducible
	performedReduction = True
	scaleFactor = 1
	while performedReduction:
		# Simplify units using SI prefixes; factors from the source side
		# multiply the result while destination-side factors divide it
		scaleFactor *= self.processPrefixes(srcUnits)
		scaleFactor /= self.processPrefixes(dstUnits)
		# Factor out common units and perform a topological sort to find the next unit to reduce
		self.factorUnits(srcUnits, dstUnits)
		unitsToReduce = UC_Utils.topologicalSort(
			self.units, [*srcUnits.keys()] + [*dstUnits.keys()])
		# Reduce units
		if len(unitsToReduce) == 0:
			break
		# Stop looping once the next candidate is no longer a derived unit
		performedReduction = self.units[unitsToReduce[0]].isDerivedUnit()
		if performedReduction:
			if unitsToReduce[0] in srcUnits:
				scaleFactor *= self.reduceUnit(unitsToReduce[0], srcUnits)
			else:
				scaleFactor /= self.reduceUnit(unitsToReduce[0], dstUnits)
	# Remove cancelled units
	removeCancelledUnits(srcUnits)
	removeCancelledUnits(dstUnits)
	# Check for conversion error: any leftover unit means the two sides
	# did not fully cancel
	if len(srcUnits) > 0 or len(dstUnits) > 0:
		raise UC_Common.UnitError(
			f"Invalid conversion: {str(srcUnit)} to {str(dstUnit)}")
	return scaleFactor
def parseUnit(tokens):
	# Evaluate a tokenized unit expression into a map of unit symbols to
	# integer exponents, using an RPN stack machine.
	# @param tokens: the unit expression tokens in infix order
	# @return a map of unit symbols to exponents
	# @raise UC_Common.UnitError if the expression is malformed
	tokens = convertToRPN(tokens)
	stack = []
	for token in tokens:
		if token == UC_Common.OPERATOR_ADD:
			# Addition is only defined between integer exponents
			a = stack.pop()
			if not isinstance(a, int):
				raise UC_Common.UnitError(f"Expected int; received '{a}'")
			b = stack.pop()
			if not isinstance(b, int):
				raise UC_Common.UnitError(f"Expected int; received '{b}'")
			stack.append(b + a)
		elif token == UC_Common.OPERATOR_SUB:
			# Subtraction is only defined between integer exponents
			a = stack.pop()
			if not isinstance(a, int):
				raise UC_Common.UnitError(f"Expected int; received '{a}'")
			b = stack.pop()
			if not isinstance(b, int):
				raise UC_Common.UnitError(f"Expected int; received '{b}'")
			stack.append(b - a)
		elif token == UC_Common.OPERATOR_MUL:
			# Multiplication adds exponent maps; bare operands are
			# promoted to {operand: 1}
			a = stack.pop()
			if not isinstance(a, dict):
				a = {a: 1}
			b = stack.pop()
			if not isinstance(b, dict):
				b = {b: 1}
			for sym, exp in b.items():
				if sym not in a:
					a[sym] = 0
				a[sym] += exp
			stack.append(a)
		elif token == UC_Common.OPERATOR_DIV:
			# Division subtracts the divisor's exponents (a) from the
			# dividend's (b); bare operands are promoted to {operand: 1}
			a = stack.pop()
			if not isinstance(a, dict):
				a = {a: 1}
			b = stack.pop()
			if not isinstance(b, dict):
				b = {b: 1}
			for sym, exp in a.items():
				if sym not in b:
					b[sym] = 0
				b[sym] -= exp
			stack.append(b)
		elif token == UC_Common.OPERATOR_EXP:
			# Exponentiation: a is the integer exponent, b the base
			# NOTE(review): b is not validated here; a dict base would
			# make {b: a} unhashable — presumably ruled out upstream
			a = stack.pop()
			b = stack.pop()
			if not isinstance(a, int):
				raise UC_Common.UnitError(f"Expected int; received '{a}'")
			stack.append({b: a})
		else:
			# Operand: an integer literal or a unit symbol
			if UC_Utils.isInt(token):
				stack.append(int(token))
			else:
				stack.append(token)
	# Aggregate into a single map
	units = {}
	while stack:
		top = stack.pop()
		if isinstance(top, dict):
			# Merge an exponent map into the result
			for sym, exp in top.items():
				if sym not in units:
					units[sym] = 0
				units[sym] += exp
		elif UC_Utils.isValidSymbol(top):
			# A lone symbol counts as exponent 1
			if top not in units:
				units[top] = 0
			units[top] += 1
		else:
			raise UC_Common.UnitError("Invalid expression")
	return units
def aggregateUnits(tokens):
	"""
	Combine tokens which constitute compound units
	@param tokens: a list of tokens
	@return the token list with each compound unit grouped into a sub-list
	"""
	aggregatedTokens = []
	unitTokens = []
	# parsingExp: 0 when not parsing an exponent; 1 when an exponent value
	# is expected next; >1 tracks bracket nesting inside a bracketed exponent
	parsingExp = 0

	def appendUnitTokens(aggregatedTokens, unitTokens, token=None):
		# Append unit tokens to list of aggregated tokens; returns a
		# fresh list for collecting the next unit's tokens
		if unitTokens:
			# A trailing '*' or '/' belongs between units, not inside one
			if unitTokens[-1] == UC_Common.OPERATOR_MUL or unitTokens[
					-1] == UC_Common.OPERATOR_DIV:
				operator = unitTokens.pop()
				aggregatedTokens.append(unitTokens)
				aggregatedTokens.append(operator)
			else:
				aggregatedTokens.append(unitTokens)
		if token is not None:
			# Inject multiplication if needed
			if ((aggregatedTokens)
					and (not UC_Utils.isSpecialChar(aggregatedTokens[-1]))
					and (token == UC_Common.BRACKET_OPEN)):
				aggregatedTokens.append(UC_Common.OPERATOR_MUL)
			aggregatedTokens.append(token)
		return []

	def handleParseExpDecrement(tokens, unitTokens, parsingExp):
		# Check if multiplication needs to be injected between adjacent units
		if parsingExp != 1:
			return parsingExp
		if tokens:
			if tokens[0] == UC_Common.OPERATOR_MUL or tokens[
					0] == UC_Common.OPERATOR_DIV:
				unitTokens.append(tokens.pop(0))
			elif UC_Utils.isValidSymbol(tokens[0]):
				unitTokens.append(UC_Common.OPERATOR_MUL)
		return 0

	def handleAppendUnitSymbol(tokens, unitTokens, parsingExp):
		# Decide how parsing continues after a unit symbol was appended:
		# start exponent parsing, consume an operator, or inject '*'
		if tokens:
			token = tokens[0]
			if token == UC_Common.OPERATOR_EXP:
				unitTokens.append(tokens.pop(0))
				return 1
			elif token == UC_Common.OPERATOR_MUL or token == UC_Common.OPERATOR_DIV:
				unitTokens.append(tokens.pop(0))
			elif UC_Utils.isValidSymbol(token):
				unitTokens.append(UC_Common.OPERATOR_MUL)
		return parsingExp

	while tokens:
		token = UC_Utils.getNextToken(tokens)
		if token == UC_Common.BRACKET_OPEN:
			if parsingExp:
				# Nested bracket inside an exponent expression
				unitTokens.append(token)
				parsingExp += 1
			else:
				unitTokens = appendUnitTokens(aggregatedTokens, unitTokens,
				                              token)
		elif token == UC_Common.BRACKET_SHUT:
			if parsingExp:
				# Closing a bracket inside an exponent expression
				unitTokens.append(token)
				parsingExp = handleParseExpDecrement(tokens, unitTokens,
				                                     parsingExp - 1)
			else:
				unitTokens = appendUnitTokens(aggregatedTokens, unitTokens,
				                              token)
		elif UC_Utils.isFloat(token):
			if parsingExp:
				# Exponents must be integers
				if not UC_Utils.isInt(token):
					raise UC_Common.UnitError(
						f"Expected int; received '{token}'")
				unitTokens.append(token)
				parsingExp = handleParseExpDecrement(tokens, unitTokens,
				                                     parsingExp)
			else:
				unitTokens = appendUnitTokens(aggregatedTokens, unitTokens,
				                              token)
		elif UC_Utils.isValidSymbol(token):
			if parsingExp:
				raise UC_Common.UnitError(f"Expected int; received '{token}'")
			unitTokens.append(token)
			parsingExp = handleAppendUnitSymbol(tokens, unitTokens,
			                                    parsingExp)
		elif UC_Utils.isOperator(token):
			if parsingExp:
				raise UC_Common.UnitError(f"Expected int; received '{token}'")
			else:
				unitTokens = appendUnitTokens(aggregatedTokens, unitTokens,
				                              token)
		else:
			raise UC_Common.UnitError(f"Unknown token; received '{token}'")
	# Flush any unit tokens still being collected
	appendUnitTokens(aggregatedTokens, unitTokens)
	return aggregatedTokens
def test_parser(verbose=False):
	# Exercise the token queue, primitive parsing, and file-parser helpers.
	# @param verbose: whether failures print diagnostic messages
	# @return the number of failed checks (0 on full success)
	test_result = 0
	# Test token queue
	tokens = ["A1", "B2", "C3"]
	if UC_Utils.getNextToken(tokens) != "A1":
		test_result += test_fail("Failed to get expected symbol", verbose)
	if UC_Utils.getNextToken(tokens) != "B2":
		test_result += test_fail("Failed to get expected symbol", verbose)
	if UC_Utils.getNextToken(tokens) != "C3":
		test_result += test_fail("Failed to get expected symbol", verbose)
	# Reading past the end of the queue must raise
	try:
		token = UC_Utils.getNextToken(tokens)
		test_result += test_fail(f"Received unexpected token {token}", verbose)
	except:
		pass
	# Test the parsing of basic datatypes
	res = UC_Utils.parseInt(["2"])
	if res != Decimal("2"):
		test_result += test_fail("Incorrectly parsed int", verbose)
	res = UC_Utils.parseFloat(["2.7"])
	if res != Decimal("2.7"):
		test_result += test_fail("Incorrectly parsed float", verbose)
	res = UC_Utils.parseSymbol(["sym"])
	if res != "sym":
		test_result += test_fail("Incorrectly parsed symbol", verbose)
	# Test the parsing of unit dependencies
	baseUnitMap = UC_FileParser.parseBaseUnitMap(
		["A", "1", ",", "B", "2", ";"])
	if baseUnitMap["A"] != 1:
		test_result += test_fail("Incorrectly parsed base unit map", verbose)
	if baseUnitMap["B"] != 2:
		test_result += test_fail("Incorrectly parsed base unit map", verbose)
	if len(baseUnitMap) != 2:
		test_result += test_fail("Incorrectly parsed base unit map", verbose)
	# A map not terminated by the end delimiter must raise
	try:
		baseUnitMap = UC_FileParser.parseBaseUnitMap(
			["A", "1", ",", "B", "2", "C"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted dependency string",
			verbose)
	except:
		pass
	# Test the parsing of a base unit
	units = {}
	conversions = {}
	UC_FileParser.parseUnit(units, conversions, ["A", ";"])
	if (("A" not in units) or ("A" in conversions) or (len(units) != 1)):
		test_result += test_fail("Incorrectly parsed base unit", verbose)
	try:
		UC_FileParser.parseUnit(["A", "B"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted base unit string",
			verbose)
	except:
		pass
	# Test the parsing of a derived unit
	UC_FileParser.parseUnit(
		units, conversions,
		["H", ":", "12.4", ",", "A", "1", ",", "B", "2", ";"])
	if (("H" not in units) or (units["H"].baseUnits["A"] != 1)
			or (units["H"].baseUnits["B"] != 2)
			or (len(units["H"].baseUnits) != 2)
			or (conversions["H"] != Decimal("12.4")) or (len(units) != 2)):
		test_result += test_fail("Incorrectly parsed derived unit", verbose)
	try:
		UC_FileParser.parseUnit(
			units, conversions,
			["H", ":", "12.4", ",", "A", "1", ",", "B", "2", "C"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted derived unit string",
			verbose)
	except:
		pass
	# A derived unit without a dependency list must raise
	try:
		UC_FileParser.parseUnit(units, conversions, ["H", ":", "12.4", ";"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted derived unit string",
			verbose)
	except:
		pass
	# Test the parsing of a prefix map
	prefixes = {}
	UC_FileParser.parsePrefix(prefixes,
	                          ["10", ":", "k", "3", ",", "M", "6", ";"])
	if ((len(prefixes) != 2) or (prefixes["k"] != (10, 3))
			or (prefixes["M"] != (10, 6))):
		test_result += test_fail("Incorrectly parsed prefix map", verbose)
	try:
		UC_FileParser.parsePrefix(prefixes,
		                          ["10", ":", "k", "3", ",", "M", "6", "C"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted prefix map string",
			verbose)
	except:
		pass
	# An empty mapping list must raise
	try:
		UC_FileParser.parsePrefix(prefixes, ["10", ":", ";"])
		test_result += test_fail(
			"Should fail to parse an incorrectly formatted prefix map string",
			verbose)
	except:
		pass
	return test_result