another ugly basic implementation

This commit is contained in:
2021-07-04 15:03:13 +02:00
parent b55115f7c3
commit b4711985b3
12 changed files with 417 additions and 60 deletions

View File

@@ -19,6 +19,7 @@ std::unique_ptr<Node> Parser::parse(const std::string &code) {
return parse_select_from_table();
}
std::cout << "ERROR, token:" << lexer.currentToken().token_string << std::endl;
return std::make_unique<Node>(NodeType::error);
}
@@ -29,9 +30,8 @@ std::unique_ptr<Node> Parser::parse_create_table() {
lexer.skipToken(TokenType::keyword_table);
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
std::string table_name = lexer.currentToken().token_string;
lexer.nextToken();
std::string table_name = lexer.consumeCurrentToken().token_string;
lexer.skipToken(TokenType::open_paren);
int column_order = 0;
do {
@@ -42,8 +42,7 @@ std::unique_ptr<Node> Parser::parse_create_table() {
// column name
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
column_name = lexer.currentToken().token_string;
lexer.nextToken();
column_name = lexer.consumeCurrentToken().token_string;
// column type and optionally len
if (lexer.tokenType() == TokenType::keyword_int) {
@@ -57,8 +56,7 @@ std::unique_ptr<Node> Parser::parse_create_table() {
lexer.nextToken();
lexer.skipToken(TokenType::open_paren);
if (lexer.tokenType() == TokenType::int_number) {
column_len = std::stoi(lexer.currentToken().token_string);
lexer.nextToken();
column_len = std::stoi(lexer.consumeCurrentToken().token_string);
} else { /* TODO handle error */ }
lexer.skipToken(TokenType::close_paren);
} else { /* TODO handle error */ }
@@ -94,15 +92,13 @@ std::unique_ptr<Node> Parser::parse_insert_into_table() {
// table name
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
std::string table_name = lexer.currentToken().token_string;
lexer.nextToken();
std::string table_name = lexer.consumeCurrentToken().token_string;
// column names
lexer.skipToken(TokenType::open_paren);
do {
if (lexer.tokenType() != TokenType::identifier) { /* TODO handle error */ }
cols_names.push_back(lexer.currentToken().token_string);
lexer.nextToken();
cols_names.push_back(lexer.consumeCurrentToken().token_string);
lexer.skipTokenOptional(TokenType::comma);
} while (lexer.tokenType() != TokenType::close_paren);
@@ -113,8 +109,7 @@ std::unique_ptr<Node> Parser::parse_insert_into_table() {
// column values
lexer.skipToken(TokenType::open_paren);
do {
cols_values.push_back(lexer.currentToken().token_string);
lexer.nextToken();
cols_values.push_back(lexer.consumeCurrentToken().token_string);
lexer.skipTokenOptional(TokenType::comma);
} while (lexer.tokenType() != TokenType::close_paren);
@@ -124,25 +119,69 @@ std::unique_ptr<Node> Parser::parse_insert_into_table() {
}
std::unique_ptr<Node> Parser::parse_select_from_table() {
std::vector<Node> where {};
std::vector<ColNameNode> cols_names {};
std::unique_ptr<Node> where_node;
lexer.skipToken(TokenType::keyword_select);
// TODO support also numbers and expressions
while (lexer.tokenType() != TokenType::keyword_from) {
// TODO add consumeToken() which returns token and advances to next token
cols_names.push_back(lexer.currentToken().token_string);
lexer.nextToken();
cols_names.push_back(lexer.consumeCurrentToken().token_string);
lexer.skipTokenOptional(TokenType::comma);
}
lexer.skipToken(TokenType::keyword_from);
std::string table_name = lexer.currentToken().token_string;
lexer.nextToken();
std::string table_name = lexer.consumeCurrentToken().token_string;
if (lexer.tokenType() == TokenType::keyword_where) {}
if (lexer.tokenType() == TokenType::keyword_where) {
lexer.skipToken(TokenType::keyword_where);
where_node = parse_where_clause();
} else {
where_node = std::make_unique<TrueNode>();
}
// if (lexer.tokenType() == TokenType::keyword_order_by) {}
// if (lexer.tokenType() == TokenType::keyword_offset) {}
// if (lexer.tokenType() == TokenType::keyword_limit) {}
return std::make_unique<SelectFromTableNode>(table_name, cols_names, where);
return std::make_unique<SelectFromTableNode>(table_name, cols_names, std::move(where_node));
}
std::unique_ptr<Node> Parser::parse_where_clause() {
    // Parses a single `<operand> <op> <operand>` comparison and wraps it
    // in a RelationalOperatorNode.
    // TODO: add support for multiple filters (AND/OR chains)
    // TODO: add support for parentheses
    auto lhs = parse_operand_node();
    auto relational_op = parse_operator();
    auto rhs = parse_operand_node();
    return std::make_unique<RelationalOperatorNode>(relational_op,
                                                   std::move(lhs),
                                                   std::move(rhs));
}
std::unique_ptr<Node> Parser::parse_operand_node() {
    // Consumes one token and converts it into a WHERE-comparison operand:
    // int literal, double literal, string literal, or a column reference
    // (identifier).
    // NOTE(review): std::stoi/std::stod throw std::invalid_argument or
    // std::out_of_range on malformed/overflowing numbers -- the lexer
    // presumably guarantees well-formed number tokens; confirm.
    auto token_type = lexer.tokenType();
    std::string token_string = lexer.consumeCurrentToken().token_string;
    switch (token_type) {
    case TokenType::int_number:
        return std::make_unique<IntValueNode>(std::stoi(token_string));
    case TokenType::double_number:
        return std::make_unique<FloatValueNode>(std::stod(token_string));
    case TokenType::string_literal:
        return std::make_unique<StringValueNode>(token_string);
    case TokenType::identifier:
        return std::make_unique<DatabaseValueNode>(token_string);
    default:
        // FIX: the original fell off the end of a non-void function here,
        // which is undefined behavior. Report the error the same way
        // Parser::parse() does, via an error node.
        // TODO: surface a proper diagnostic / throw a parse exception.
        return std::make_unique<Node>(NodeType::error);
    }
}
// Consumes the current token and maps it to the relational operator used
// in a WHERE comparison (currently only `=` and `>`).
RelationalOperatorType Parser::parse_operator() {
auto op = lexer.consumeCurrentToken();
switch (op.type) {
case TokenType::equal:
return RelationalOperatorType::equal;
case TokenType::greater:
return RelationalOperatorType::greater;
default: ;
// Throw exception
// NOTE(review): falling out of this switch reaches the end of a
// non-void function, which is undefined behavior for any unrecognized
// operator token. Needs a real error path (exception or an error enum
// value) -- cannot be fixed here without an <stdexcept> include or an
// error member on RelationalOperatorType, neither visible in this view.
}
}