def test_tokenize_target_with_plus_sign():
    """Plus signs inside option values must survive tokenization intact."""
    expected = [
        "foo",
        "-opt1=+value1",
        "--flag",
        ",",
        "bar",
        "-opt2=test,+v",
    ]
    actual = tokenize_target("foo -opt1=+value1 --flag, bar -opt2=test,+v")
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected
def test_tokenize_target_with_opts():
    """Short options, long flags, and a comma separator each become one token."""
    expected = [
        "foo",
        "-opt1=value1",
        "--flag",
        ",",
        "bar",
        "-opt2=value2",
    ]
    actual = tokenize_target("foo -opt1=value1 --flag, bar -opt2=value2")
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected
def test_tokenize_target_with_dashes():
    """Embedded dashes in names and option values are not treated as separators."""
    expected = ["foo-bar1", "-opt-1=t-e-s-t", ",", "baz"]
    actual = tokenize_target("foo-bar1 -opt-1=t-e-s-t, baz")
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected
def test_tokenize_target_with_commas_and_double_quotes():
    """Commas inside a double-quoted option value stay within a single token."""
    expected = ["foo", '-opt1="v, a, l, u, e"', ",", "bar"]
    actual = tokenize_target('foo -opt1="v, a, l, u, e", bar')
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected
def test_tokenize_target_with_commas_and_single_quotes():
    """Commas inside a single-quoted option value stay within a single token."""
    expected = ["foo", "-opt1='v, a, l, u, e'", ",", "bar"]
    actual = tokenize_target("foo -opt1='v, a, l, u, e', bar")
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected
def test_tokenize_target_with_commas():
    """Unquoted commas inside an option value do not split the token."""
    expected = ["foo", "-opt1=v,a,l,u,e,1", "--flag"]
    actual = tokenize_target("foo -opt1=v,a,l,u,e,1 --flag")
    # Length check first gives a clearer failure before element comparison.
    assert len(actual) == len(expected)
    assert actual == expected