def get_multiple_explorations_by_version(exp_id, version_numbers):
    """Returns a list of Exploration domain objects corresponding to the
    specified versions.

    Args:
        exp_id: str. ID of the exploration.
        version_numbers: list(int). List of version numbers.

    Returns:
        list(Exploration). List of Exploration domain objects.

    Raises:
        Exception. One or more of the given versions of the exploration could
            not be converted to the latest schema version.
    """
    explorations = []
    exploration_models = exp_models.ExplorationModel.get_multi_versions(
        exp_id, version_numbers)
    error_versions = []
    for index, exploration_model in enumerate(exploration_models):
        try:
            explorations.append(get_exploration_from_model(exploration_model))
        except utils.ExplorationConversionError:
            error_versions.append(version_numbers[index])

    if error_versions:
        raise Exception(
            'Exploration %s, versions [%s] could not be converted to latest '
            'schema version.'
            % (exp_id, ', '.join(python_utils.MAP(str, error_versions))))
    return explorations
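# Illustrative usage sketch, not part of the original module: assumes an
# exploration with the hypothetical ID 'exp_id_0' already has versions 1, 2
# and 3 committed to the datastore. Under that assumption, fetching several
# snapshots at once looks like:
#
#     explorations = get_multiple_explorations_by_version(
#         'exp_id_0', [1, 2, 3])
#     for exploration in explorations:
#         # Each item is an Exploration domain object at the requested
#         # version; a version that cannot be migrated to the latest schema
#         # raises the Exception described in the docstring instead.
#         print(exploration.title)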
def test_tokenize(self):
    """Tests for tokenize method."""
    expression = 'a+b'
    expected_output = ['a', '+', 'b']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '53.4 - 6/alpha'
    expected_output = ['53.4', '-', '6', '/', 'alpha']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'a^0.5 + (-zeta)'
    expected_output = ['a', '^', '0.5', '+', '(', '-', 'zeta', ')']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'sqrt(3/[-A])'
    expected_output = ['sqrt', '(', '3', '/', '(', '-', 'A', ')', ')']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'abs(sqrt(3)) * 4/ 2^ 3 '
    expected_output = [
        'abs', '(', 'sqrt', '(', '3', ')', ')', '*', '4', '/', '2', '^',
        '3'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = ''
    expected_output = []
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '3.4^4.3/0.0005 * {9}'
    expected_output = [
        '3.4', '^', '4.3', '/', '0.0005', '*', '(', '9', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'ab'
    expected_output = ['a', '*', 'b']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'a**bc'
    expected_output = ['a', '*', '*', 'b', '*', 'c']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'Alpha'
    expected_output = ['A', '*', 'l', '*', 'p', '*', 'h', '*', 'a']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'alpha'
    expected_output = ['alpha']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'alphax'
    expected_output = ['alpha', '*', 'x']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'xalpha'
    expected_output = ['x', '*', 'alpha']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '2.2gamma/23'
    expected_output = ['2.2', '*', 'gamma', '/', '23']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '2pir^2/2'
    expected_output = ['2', '*', 'pi', '*', 'r', '^', '2', '/', '2']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'sigmaepsilon'
    expected_output = ['sigma', '*', 'epsilon']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'sqrt(epsilonpsi-2abeta)'
    expected_output = [
        'sqrt', '(', 'epsilon', '*', 'psi', '-', '2', '*', 'a', '*',
        'beta', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'alphasqrt(3/4)'
    expected_output = ['alpha', '*', 'sqrt', '(', '3', '/', '4', ')']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'tan(theta)cos(theta)'
    expected_output = [
        'tan', '(', 'theta', ')', '*', 'cos', '(', 'theta', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '(a+b)(a-b)'
    expected_output = [
        '(', 'a', '+', 'b', ')', '*', '(', 'a', '-', 'b', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'xsqrt(2)x'
    expected_output = ['x', '*', 'sqrt', '(', '2', ')', '*', 'x']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'sin(pi)(a - x^2alpha)'
    expected_output = [
        'sin', '(', 'pi', ')', '*', '(', 'a', '-', 'x', '^', '2', '*',
        'alpha', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'cosh(3a45theta) + sin(x(theta))'
    expected_output = [
        'cosh', '(', '3', '*', 'a', '*', '45', '*', 'theta', ')', '+',
        'sin', '(', 'x', '*', '(', 'theta', ')', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('a.3')
    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('.3 - 2.4')
    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('1.2.3 + 4/2')
    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('a . . 3')
    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('3..4')
    with self.assertRaisesRegexp(Exception, 'Invalid token: ..'):
        expression_parser.tokenize('..5')
def test_tokenize(self):
    """Tests for tokenize method."""
    expression = 'a+b'
    expected_output = ['a', '+', 'b']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '53.4 - 6/alpha'
    expected_output = ['53.4', '-', '6', '/', 'alpha']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'a^0.5 + (-zeta)'
    expected_output = ['a', '^', '0.5', '+', '(', '-', 'zeta', ')']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'sqrt(3/[-A])'
    expected_output = ['sqrt', '(', '3', '/', '(', '-', 'A', ')', ')']
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = 'abs(sqrt(3)) * 4/ 2^ 3 '
    expected_output = [
        'abs', '(', 'sqrt', '(', '3', ')', ')', '*', '4', '/', '2', '^',
        '3'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = ''
    expected_output = []
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    expression = '3.4^4.3/0.0005 * {9}'
    expected_output = [
        '3.4', '^', '4.3', '/', '0.0005', '*', '(', '9', ')'
    ]
    actual_output = python_utils.MAP(
        lambda x: x.text, expression_parser.tokenize(expression))
    self.assertEqual(list(actual_output), expected_output)

    with self.assertRaises(Exception):
        expression_parser.tokenize('a.3')
    with self.assertRaises(Exception):
        expression_parser.tokenize('.3 - 2.4')
    with self.assertRaises(Exception):
        expression_parser.tokenize('1.2.3 + 4/2')
    with self.assertRaises(Exception):
        expression_parser.tokenize('a . . 3')
    with self.assertRaises(Exception):
        expression_parser.tokenize('3..4')
    with self.assertRaises(Exception):
        expression_parser.tokenize('..5')
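# Standalone sketch of the behaviour exercised by the tests above, not part of
# the original test file: tokenize() yields token objects whose .text
# attribute holds the symbol, with implicit multiplication made explicit.
# Assumes core.domain.expression_parser is importable, as in the tests.
#
#     from core.domain import expression_parser
#
#     tokens = expression_parser.tokenize('2pir^2/2')
#     token_texts = [token.text for token in tokens]
#     # token_texts == ['2', '*', 'pi', '*', 'r', '^', '2', '/', '2']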