#include <map>
#include <unistd.h>     /// STDIN_FILENO, STDOUT_FILENO
#include <Parsers/Lexer.h>
#include <Parsers/TokenIterator.h>
#include <Core/Types.h>
#include <IO/ReadBufferFromFileDescriptor.h>
#include <IO/WriteBufferFromFileDescriptor.h>
#include <IO/ReadHelpers.h>
#include <IO/WriteHelpers.h>


/// How to test:
/// for i in ~/work/ClickHouse/dbms/tests/queries/0_stateless/*.sql; do echo $i; grep -q 'FORMAT' $i || ./lexer < $i || break; done
///


using namespace DB;

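/// The `|| break` stops the loop at the first failing file: this tool returns
/// a non-zero exit code as soon as it encounters an error token.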
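/// ANSI escape sequence used to highlight each token type.
/// Every highlighted token is followed by "\033[0m" to reset the attributes.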
std::map<TokenType, const char *> hilite =
{
    {TokenType::Whitespace, "\033[0;44m"},
    {TokenType::Comment, "\033[1;46m"},
    {TokenType::BareWord, "\033[1m"},
    {TokenType::Number, "\033[1;36m"},
    {TokenType::StringLiteral, "\033[1;32m"},
    {TokenType::QuotedIdentifier, "\033[1;35m"},

    {TokenType::OpeningRoundBracket, "\033[1;33m"},
    {TokenType::ClosingRoundBracket, "\033[1;33m"},
    {TokenType::OpeningSquareBracket, "\033[1;33m"},
    {TokenType::ClosingSquareBracket, "\033[1;33m"},
    {TokenType::OpeningCurlyBrace, "\033[1;33m"},
    {TokenType::ClosingCurlyBrace, "\033[1;33m"},

    {TokenType::Comma, "\033[1;33m"},
    {TokenType::Semicolon, "\033[1;33m"},
    {TokenType::Dot, "\033[1;33m"},
    {TokenType::Asterisk, "\033[1;33m"},
    {TokenType::Plus, "\033[1;33m"},
    {TokenType::Minus, "\033[1;33m"},
    {TokenType::Slash, "\033[1;33m"},
    {TokenType::Percent, "\033[1;33m"},
    {TokenType::Arrow, "\033[1;33m"},
    {TokenType::QuestionMark, "\033[1;33m"},
    {TokenType::Colon, "\033[1;33m"},
    {TokenType::Equals, "\033[1;33m"},
    {TokenType::NotEquals, "\033[1;33m"},
    {TokenType::Less, "\033[1;33m"},
    {TokenType::Greater, "\033[1;33m"},
    {TokenType::LessOrEquals, "\033[1;33m"},
    {TokenType::GreaterOrEquals, "\033[1;33m"},
    {TokenType::Concatenation, "\033[1;33m"},

    {TokenType::EndOfStream, ""},

    {TokenType::Error, "\033[0;41m"},
    {TokenType::ErrorMultilineCommentIsNotClosed, "\033[0;41m"},
    {TokenType::ErrorSingleQuoteIsNotClosed, "\033[0;41m"},
    {TokenType::ErrorDoubleQuoteIsNotClosed, "\033[0;41m"},
    {TokenType::ErrorBackQuoteIsNotClosed, "\033[0;41m"},
    {TokenType::ErrorSingleExclamationMark, "\033[0;41m"},
    {TokenType::ErrorWrongNumber, "\033[0;41m"},
    {TokenType::ErrorMaxQuerySizeExceeded, "\033[0;41m"},
};


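/// Reads a query from stdin and echoes it to stdout token by token,
/// wrapping each token in the escape sequence for its type.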
int main(int, char **)
{
    String query;
    ReadBufferFromFileDescriptor in(STDIN_FILENO);
    WriteBufferFromFileDescriptor out(STDOUT_FILENO);
    readStringUntilEOF(query, in);

    Lexer lexer(query.data(), query.data() + query.size());

    while (true)
    {
        Token token = lexer.nextToken();

        if (token.isEnd())
            break;

        writeChar(' ', out);

        auto it = hilite.find(token.type);
        if (it != hilite.end())
            writeCString(it->second, out);

        writeString(token.begin, token.size(), out);

        if (it != hilite.end())
            writeCString("\033[0m", out);

        writeChar(' ', out);

        /// Stop at the first error token: it has already been printed, highlighted in red.
        if (token.isError())
            return 1;
    }

    writeChar('\n', out);
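
    /// An alternative implementation, kept for reference: pre-tokenize the whole
    /// query into Tokens and walk it with TokenIterator instead of calling
    /// Lexer::nextToken() in a loop.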
/*
    Tokens tokens(query.data(), query.data() + query.size());
    TokenIterator token(tokens);

    while (!token->isEnd())
    {
        auto it = hilite.find(token->type);
        if (it != hilite.end())
            writeCString(it->second, out);

        writeString(token->begin, token->size(), out);

        if (it != hilite.end())
            writeCString("\033[0m", out);

        writeChar('\n', out);
        ++token;
    }*/

    return 0;
}