add tokenizer tests for splitting path environment variables

This commit is contained in:
Axel Kohlmeyer 2020-08-29 22:06:33 -04:00
parent 8601e608ca
commit 7413dc783e
No known key found for this signature in database
GPG Key ID: D9B44E93BF0C375A
1 changed file with 22 additions and 0 deletions

View File

@ -62,6 +62,28 @@ TEST(Tokenizer, iterate_words)
ASSERT_EQ(t.count(), 2);
}
TEST(Tokenizer, unix_paths)
{
    // Splitting a Unix PATH-style string: leading/trailing separators
    // must not yield empty tokens, so only three entries remain.
    Tokenizer paths(":one:two:three:", ":");
    ASSERT_EQ(3, paths.count());
    ASSERT_THAT(paths.next(), Eq("one"));
    ASSERT_THAT(paths.next(), Eq("two"));
    // one token left after consuming two of three
    ASSERT_EQ(true, paths.has_next());
    ASSERT_THAT(paths.next(), Eq("three"));
    // exhausted after the final token
    ASSERT_EQ(false, paths.has_next());
}
TEST(Tokenizer, windows_paths)
{
    // Splitting a Windows PATH-style string on ';': drive-letter colons
    // and backslashes stay inside tokens; the trailing ';' adds no token.
    Tokenizer paths("c:\\one;\\two\\three;d:four;", ";");
    ASSERT_EQ(3, paths.count());
    ASSERT_THAT(paths.next(), Eq("c:\\one"));
    // two tokens left after consuming the first of three
    ASSERT_EQ(true, paths.has_next());
    ASSERT_THAT(paths.next(), Eq("\\two\\three"));
    ASSERT_THAT(paths.next(), Eq("d:four"));
    // exhausted after the final token
    ASSERT_EQ(false, paths.has_next());
}
TEST(Tokenizer, default_separators)
{
Tokenizer t(" \r\n test \t word \f");