a bit of further work

This commit is contained in:
2021-06-30 23:29:09 +02:00
parent 5c7908ac4b
commit b55115f7c3
10 changed files with 309 additions and 56 deletions

View File

@@ -1,7 +1,7 @@
#include "parser.h"
#include "exception.h"
// TODO handle premature eof
Parser::Parser() {
lexer = Lexer{};
@@ -9,12 +9,14 @@ Parser::Parser() {
std::unique_ptr<Node> Parser::parse(const std::string &code) {
lexer.parse(code);
lexer.debugTokens();
// lexer.debugTokens();
if (lexer.tokenType() == TokenType::keyword_create && lexer.nextTokenType() == TokenType::keyword_table) {
return parse_create_table();
} if (lexer.tokenType() == TokenType::keyword_insert) {
return parse_insert_into_table();
} if (lexer.tokenType() == TokenType::keyword_select) {
return parse_select();
return parse_select_from_table();
}
return std::make_unique<Node>(NodeType::error);
@@ -31,10 +33,11 @@ std::unique_ptr<Node> Parser::parse_create_table() {
lexer.nextToken();
lexer.skipToken(TokenType::open_paren);
int column_order = 0;
do {
std::string column_name;
ColumnType column_type;
int column_len {1};
int column_len {1};
bool column_nullable {true};
// column name
@@ -68,10 +71,10 @@ std::unique_ptr<Node> Parser::parse_create_table() {
lexer.nextToken();
}
cols_def.push_back(ColDefNode(column_name, column_type, column_len, column_nullable));
cols_def.push_back(ColDefNode(column_name, column_type, column_order++, column_len, column_nullable));
lexer.skipTokenOptional(TokenType::comma);
if (lexer.tokenType() == TokenType::comma) lexer.nextToken();
// TODO in future constraints
} while (lexer.tokenType() != TokenType::close_paren);
@@ -80,8 +83,66 @@ std::unique_ptr<Node> Parser::parse_create_table() {
return std::make_unique<CreateTableNode>(table_name, cols_def);
}
std::unique_ptr<Node> Parser::parse_select() {
std::vector<Node> exec_code {};
return std::make_unique<Node>(NodeType::not_implemented_yet);
// Parses an INSERT statement of the form:
//   INSERT INTO <table> ( <col> [, <col>]* ) VALUES ( <val> [, <val>]* )
// Returns an InsertIntoTableNode carrying the table name, the column-name
// list and the positionally matching value list.
// Fix: removed the unused local `exec_code` vector (declared, never read).
std::unique_ptr<Node> Parser::parse_insert_into_table() {
    std::vector<ColNameNode> cols_names {};
    std::vector<ColValueNode> cols_values {};
    lexer.skipToken(TokenType::keyword_insert);
    lexer.skipToken(TokenType::keyword_into);
    // table name
    if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
    std::string table_name = lexer.currentToken().token_string;
    lexer.nextToken();
    // column name list: "( a, b, c )"
    lexer.skipToken(TokenType::open_paren);
    do {
        if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
        // ColNameNode is constructed implicitly from the token string
        cols_names.push_back(lexer.currentToken().token_string);
        lexer.nextToken();
        lexer.skipTokenOptional(TokenType::comma);
    } while (lexer.tokenType() != TokenType::close_paren);
    lexer.skipToken(TokenType::close_paren);
    lexer.skipToken(TokenType::keyword_values);
    // value list: "( v1, v2, v3 )" — any token is accepted as a value here
    // TODO verify cols_names.size() == cols_values.size() and report an error
    lexer.skipToken(TokenType::open_paren);
    do {
        cols_values.push_back(lexer.currentToken().token_string);
        lexer.nextToken();
        lexer.skipTokenOptional(TokenType::comma);
    } while (lexer.tokenType() != TokenType::close_paren);
    lexer.skipToken(TokenType::close_paren);
    return std::make_unique<InsertIntoTableNode>(table_name, cols_names, cols_values);
}
std::unique_ptr<Node> Parser::parse_select_from_table() {
std::vector<Node> where {};
std::vector<ColNameNode> cols_names {};
lexer.skipToken(TokenType::keyword_select);
// TODO support also numbers and expressions
while (lexer.tokenType() != TokenType::keyword_from) {
// TODO add consumeToken() which returns token and advances to next token
cols_names.push_back(lexer.currentToken().token_string);
lexer.nextToken();
lexer.skipTokenOptional(TokenType::comma);
}
lexer.skipToken(TokenType::keyword_from);
std::string table_name = lexer.currentToken().token_string;
lexer.nextToken();
if (lexer.tokenType() == TokenType::keyword_where) {}
// if (lexer.tokenType() == TokenType::keyword_order_by) {}
// if (lexer.tokenType() == TokenType::keyword_offset) {}
// if (lexer.tokenType() == TokenType::keyword_limit) {}
return std::make_unique<SelectFromTableNode>(table_name, cols_names, where);
}