6 changes: 3 additions & 3 deletions src/duckdb/src/function/table/version/pragma_version.cpp
@@ -1,5 +1,5 @@
 #ifndef DUCKDB_PATCH_VERSION
-#define DUCKDB_PATCH_VERSION "4-dev55"
+#define DUCKDB_PATCH_VERSION "4-dev67"
 #endif
 #ifndef DUCKDB_MINOR_VERSION
 #define DUCKDB_MINOR_VERSION 4
@@ -8,10 +8,10 @@
 #define DUCKDB_MAJOR_VERSION 1
 #endif
 #ifndef DUCKDB_VERSION
-#define DUCKDB_VERSION "v1.4.4-dev55"
+#define DUCKDB_VERSION "v1.4.4-dev67"
 #endif
 #ifndef DUCKDB_SOURCE_ID
-#define DUCKDB_SOURCE_ID "7f217607e4"
+#define DUCKDB_SOURCE_ID "5a334c23da"
 #endif
 #include "duckdb/function/table/system_functions.hpp"
 #include "duckdb/main/database.hpp"
40 changes: 21 additions & 19 deletions src/duckdb/src/parser/parser.cpp
@@ -165,27 +165,29 @@ bool Parser::StripUnicodeSpaces(const string &query_str, string &new_query) {
 	return ReplaceUnicodeSpaces(query_str, new_query, unicode_spaces);
 }
 
-vector<string> SplitQueryStringIntoStatements(const string &query) {
-	// Break sql string down into sql statements using the tokenizer
-	vector<string> query_statements;
-	auto tokens = Parser::Tokenize(query);
-	idx_t next_statement_start = 0;
-	for (idx_t i = 1; i < tokens.size(); ++i) {
-		auto &t_prev = tokens[i - 1];
-		auto &t = tokens[i];
-		if (t_prev.type == SimplifiedTokenType::SIMPLIFIED_TOKEN_OPERATOR) {
-			// LCOV_EXCL_START
-			for (idx_t c = t_prev.start; c <= t.start; ++c) {
-				if (query.c_str()[c] == ';') {
-					query_statements.emplace_back(query.substr(next_statement_start, t.start - next_statement_start));
-					next_statement_start = tokens[i].start;
-				}
+vector<string> SplitQueries(const string &input_query) {
+	vector<string> queries;
+	auto tokenized_input = Parser::Tokenize(input_query);
+	size_t last_split = 0;
+
+	for (const auto &token : tokenized_input) {
+		if (token.type == SimplifiedTokenType::SIMPLIFIED_TOKEN_OPERATOR && input_query[token.start] == ';') {
+			string segment = input_query.substr(last_split, token.start - last_split);
+			StringUtil::Trim(segment);
+			if (!segment.empty()) {
+				segment.append(";");
+				queries.push_back(std::move(segment));
 			}
-			// LCOV_EXCL_STOP
+			last_split = token.start + 1;
 		}
 	}
-	query_statements.emplace_back(query.substr(next_statement_start, query.size() - next_statement_start));
-	return query_statements;
+	string final_segment = input_query.substr(last_split);
+	StringUtil::Trim(final_segment);
+	if (!final_segment.empty()) {
+		final_segment.append(";");
+		queries.push_back(std::move(final_segment));
+	}
+	return queries;
 }
 
 void Parser::ParseQuery(const string &query) {
@@ -236,7 +238,7 @@ void Parser::ParseQuery(const string &query) {
 			throw ParserException::SyntaxError(query, parser_error, parser_error_location);
 		} else {
 			// split sql string into statements and re-parse using extension
-			auto query_statements = SplitQueryStringIntoStatements(query);
+			auto query_statements = SplitQueries(query);
 			idx_t stmt_loc = 0;
 			for (auto const &query_statement : query_statements) {
 				ErrorData another_parser_error;
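
For reference, the new SplitQueries changes the splitting rules: each statement is trimmed, segments that are empty after trimming are dropped, and the terminating ';' is re-appended to every surviving statement. Below is a minimal standalone sketch of that behavior. This is hypothetical illustration code, not part of the PR: a toy quote-aware scan and a local Trim helper stand in for Parser::Tokenize and StringUtil::Trim, which also handle comments, dollar quoting, and escaped quotes.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Local stand-in for StringUtil::Trim (assumption, not DuckDB's code).
static std::string Trim(const std::string &s) {
	const char *ws = " \t\n\r";
	auto b = s.find_first_not_of(ws);
	if (b == std::string::npos) {
		return "";
	}
	auto e = s.find_last_not_of(ws);
	return s.substr(b, e - b + 1);
}

// Approximation of the new splitting rules: split on top-level ';', trim each
// segment, drop segments that are empty after trimming, re-append the ';'.
std::vector<std::string> SplitQueriesSketch(const std::string &input_query) {
	std::vector<std::string> queries;
	size_t last_split = 0;
	bool in_string = false;
	for (size_t i = 0; i < input_query.size(); ++i) {
		char c = input_query[i];
		if (c == '\'') {
			in_string = !in_string; // toy handling: no escaped-quote support
		} else if (c == ';' && !in_string) {
			std::string segment = Trim(input_query.substr(last_split, i - last_split));
			if (!segment.empty()) {
				segment.append(";");
				queries.push_back(std::move(segment));
			}
			last_split = i + 1;
		}
	}
	std::string final_segment = Trim(input_query.substr(last_split));
	if (!final_segment.empty()) {
		final_segment.append(";");
		queries.push_back(std::move(final_segment));
	}
	return queries;
}

int main() {
	// The empty statement between the first two semicolons is dropped, and the
	// ';' inside the string literal does not split.
	for (const auto &q : SplitQueriesSketch("SELECT 1; ;  SELECT ';' ; SELECT 2")) {
		std::cout << q << "\n";
	}
	// Output:
	// SELECT 1;
	// SELECT ';';
	// SELECT 2;
}

On the sample input the sketch yields three statements, each re-terminated with ';'. The removed SplitQueryStringIntoStatements kept untrimmed segments, including whitespace-only ones, so empty statements between semicolons were passed on to the extension re-parse path rather than being filtered out.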