Lexer
- Implemented ::token_split, which performs a full lex and, unlike ::word_split, does not require whitespace between tokens. - Added unit tests.
This commit is contained in:
@@ -630,6 +630,19 @@ void Lexer::word_split (std::vector <std::string>& words, const std::string& inp
|
||||
words.push_back (word);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Split 'input' into 'tokens'.
|
||||
void Lexer::token_split (std::vector <std::string>& words, const std::string& input)
|
||||
{
|
||||
words.clear ();
|
||||
|
||||
std::string word;
|
||||
Lexer::Type type;
|
||||
Lexer lex (input);
|
||||
while (lex.token (word, type))
|
||||
words.push_back (word);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
bool Lexer::is_punct (int c) const
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user