// parser.cpp — recursive-descent parser producing AST nodes from lexed SQL
#include "parser.h"
|
|
#include "exception.h"
|
|
|
|
// TODO: handle premature EOF (token stream ending mid-statement)
|
|
|
|
/// Default constructor. The `lexer` member is value-initialized by its own
/// default constructor, so the previous body's `lexer = Lexer{};` was a
/// redundant reassignment; defaulting expresses the same intent directly.
Parser::Parser() = default;
|
|
|
|
std::unique_ptr<Node> Parser::parse(const std::string &code) {
|
|
lexer.parse(code);
|
|
// lexer.debugTokens();
|
|
|
|
if (lexer.tokenType() == TokenType::keyword_create && lexer.nextTokenType() == TokenType::keyword_table) {
|
|
return parse_create_table();
|
|
} if (lexer.tokenType() == TokenType::keyword_insert) {
|
|
return parse_insert_into_table();
|
|
} if (lexer.tokenType() == TokenType::keyword_select) {
|
|
return parse_select_from_table();
|
|
}
|
|
|
|
return std::make_unique<Node>(NodeType::error);
|
|
}
|
|
|
|
std::unique_ptr<Node> Parser::parse_create_table() {
|
|
std::vector<ColDefNode> cols_def {};
|
|
|
|
lexer.skipToken(TokenType::keyword_create);
|
|
lexer.skipToken(TokenType::keyword_table);
|
|
|
|
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
|
|
std::string table_name = lexer.currentToken().token_string;
|
|
lexer.nextToken();
|
|
|
|
lexer.skipToken(TokenType::open_paren);
|
|
int column_order = 0;
|
|
do {
|
|
std::string column_name;
|
|
ColumnType column_type;
|
|
int column_len {1};
|
|
bool column_nullable {true};
|
|
|
|
// column name
|
|
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
|
|
column_name = lexer.currentToken().token_string;
|
|
lexer.nextToken();
|
|
|
|
// column type and optionaly len
|
|
if (lexer.tokenType() == TokenType::keyword_int) {
|
|
column_type = ColumnType::integer_type;
|
|
lexer.nextToken();
|
|
} else if (lexer.tokenType() == TokenType::keyword_float) {
|
|
column_type = ColumnType::float_type;
|
|
lexer.nextToken();
|
|
} else if (lexer.tokenType() == TokenType::keyword_varchar) {
|
|
column_type = ColumnType::varchar_type;
|
|
lexer.nextToken();
|
|
lexer.skipToken(TokenType::open_paren);
|
|
if (lexer.tokenType() == TokenType::int_number) {
|
|
column_len = std::stoi(lexer.currentToken().token_string);
|
|
lexer.nextToken();
|
|
} else { /* TODO handle error */ }
|
|
lexer.skipToken(TokenType::close_paren);
|
|
} else { /* TODO handle error */ }
|
|
|
|
if (lexer.tokenType() == TokenType::keyword_not) {
|
|
lexer.nextToken();
|
|
lexer.skipToken(TokenType::keyword_null);
|
|
column_nullable = false;
|
|
} else if (lexer.tokenType() == TokenType::keyword_null) {
|
|
lexer.nextToken();
|
|
}
|
|
|
|
cols_def.push_back(ColDefNode(column_name, column_type, column_order++, column_len, column_nullable));
|
|
|
|
lexer.skipTokenOptional(TokenType::comma);
|
|
|
|
// TODO in future constraints
|
|
|
|
} while (lexer.tokenType() != TokenType::close_paren);
|
|
|
|
|
|
return std::make_unique<CreateTableNode>(table_name, cols_def);
|
|
}
|
|
|
|
|
|
std::unique_ptr<Node> Parser::parse_insert_into_table() {
|
|
std::vector<Node> exec_code {};
|
|
std::vector<ColNameNode> cols_names {};
|
|
std::vector<ColValueNode> cols_values {};
|
|
|
|
lexer.skipToken(TokenType::keyword_insert);
|
|
lexer.skipToken(TokenType::keyword_into);
|
|
|
|
// table name
|
|
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
|
|
std::string table_name = lexer.currentToken().token_string;
|
|
lexer.nextToken();
|
|
|
|
// column names
|
|
lexer.skipToken(TokenType::open_paren);
|
|
do {
|
|
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
|
|
cols_names.push_back(lexer.currentToken().token_string);
|
|
lexer.nextToken();
|
|
|
|
lexer.skipTokenOptional(TokenType::comma);
|
|
} while (lexer.tokenType() != TokenType::close_paren);
|
|
lexer.skipToken(TokenType::close_paren);
|
|
|
|
lexer.skipToken(TokenType::keyword_values);
|
|
|
|
// column values
|
|
lexer.skipToken(TokenType::open_paren);
|
|
do {
|
|
cols_values.push_back(lexer.currentToken().token_string);
|
|
lexer.nextToken();
|
|
|
|
lexer.skipTokenOptional(TokenType::comma);
|
|
} while (lexer.tokenType() != TokenType::close_paren);
|
|
lexer.skipToken(TokenType::close_paren);
|
|
|
|
return std::make_unique<InsertIntoTableNode>(table_name, cols_names, cols_values);
|
|
}
|
|
|
|
std::unique_ptr<Node> Parser::parse_select_from_table() {
|
|
std::vector<Node> where {};
|
|
std::vector<ColNameNode> cols_names {};
|
|
|
|
lexer.skipToken(TokenType::keyword_select);
|
|
// TODO support also numbers and expressions
|
|
while (lexer.tokenType() != TokenType::keyword_from) {
|
|
// TODO add consumeToken() which returns token and advances to next token
|
|
cols_names.push_back(lexer.currentToken().token_string);
|
|
lexer.nextToken();
|
|
lexer.skipTokenOptional(TokenType::comma);
|
|
}
|
|
lexer.skipToken(TokenType::keyword_from);
|
|
std::string table_name = lexer.currentToken().token_string;
|
|
lexer.nextToken();
|
|
|
|
if (lexer.tokenType() == TokenType::keyword_where) {}
|
|
// if (lexer.tokenType() == TokenType::keyword_order_by) {}
|
|
// if (lexer.tokenType() == TokenType::keyword_offset) {}
|
|
// if (lexer.tokenType() == TokenType::keyword_limit) {}
|
|
|
|
return std::make_unique<SelectFromTableNode>(table_name, cols_names, where);
|
|
} |