@@ -234,15 +234,12 @@ struct BaseModeFunction {
}

template <class STATE, class OP>
-	static void Combine(const STATE &source, STATE &target, AggregateInputData &) {
+	static void Combine(const STATE &source, STATE &target, AggregateInputData &aggr_input_data) {
if (!source.frequency_map) {
return;
}
if (!target.frequency_map) {
-			// Copy - don't destroy! Otherwise windowing will break.
-			target.frequency_map = new typename STATE::Counts(*source.frequency_map);
-			target.count = source.count;
-			return;
+			target.frequency_map = TYPE_OP::CreateEmpty(aggr_input_data.allocator);
}
for (auto &val : *source.frequency_map) {
auto &i = (*target.frequency_map)[val.first];
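The new Combine above no longer copies the source map into an empty target; it creates a fresh allocator-backed map with TYPE_OP::CreateEmpty and falls through to the shared merge loop. A minimal sketch of that merge semantics, not part of the diff, with std::unordered_map standing in for STATE::Counts:

#include <cstdint>
#include <unordered_map>

// Hypothetical stand-in for STATE::Counts: value -> occurrence count.
using Counts = std::unordered_map<int64_t, uint64_t>;

// Merge `source` into `target`. operator[] default-constructs missing
// counters to zero, so merging into a freshly created empty map accumulates
// the same totals as merging into a partially filled one.
void MergeCounts(const Counts &source, Counts &target) {
	for (const auto &entry : source) {
		target[entry.first] += entry.second;
	}
}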
src/duckdb/src/common/adbc/adbc.cpp (24 additions & 18 deletions)
@@ -1320,12 +1320,21 @@ AdbcStatusCode StatementSetOption(struct AdbcStatement *statement, const char *k
return ADBC_STATUS_INVALID_ARGUMENT;
}

+std::string createFilter(const char *input) {
+	if (input) {
+		auto quoted = duckdb::KeywordHelper::WriteQuoted(input, '\'');
+		return quoted;
+	}
+	return "'%'";
+}
+
AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth, const char *catalog,
const char *db_schema, const char *table_name, const char **table_type,
const char *column_name, struct ArrowArrayStream *out, struct AdbcError *error) {
-	std::string catalog_filter = catalog ? catalog : "%";
-	std::string db_schema_filter = db_schema ? db_schema : "%";
-	std::string table_name_filter = table_name ? table_name : "%";
+	std::string catalog_filter = createFilter(catalog);
+	std::string db_schema_filter = createFilter(db_schema);
+	std::string table_name_filter = createFilter(table_name);
+	std::string column_name_filter = createFilter(column_name);
std::string table_type_condition = "";
if (table_type && table_type[0]) {
table_type_condition = " AND table_type IN (";
@@ -1341,13 +1350,10 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
if (i > 0) {
table_type_condition += ", ";
}
table_type_condition += "'";
table_type_condition += table_type[i];
table_type_condition += "'";
table_type_condition += createFilter(table_type[i]);
}
table_type_condition += ")";
}
-	std::string column_name_filter = column_name ? column_name : "%";

std::string query;
switch (depth) {
@@ -1392,7 +1398,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
)[] catalog_db_schemas
FROM
information_schema.schemata
-	WHERE catalog_name LIKE '%s'
+	WHERE catalog_name LIKE %s
GROUP BY catalog_name
)",
catalog_filter);
@@ -1405,7 +1411,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
catalog_name,
schema_name,
FROM information_schema.schemata
-	WHERE schema_name LIKE '%s'
+	WHERE schema_name LIKE %s
)

SELECT
@@ -1448,7 +1454,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
information_schema.schemata
LEFT JOIN db_schemas dbs
USING (catalog_name, schema_name)
-	WHERE catalog_name LIKE '%s'
+	WHERE catalog_name LIKE %s
GROUP BY catalog_name
)",
db_schema_filter, catalog_filter);
@@ -1492,7 +1498,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
)[],
}) db_schema_tables
FROM information_schema.tables
-	WHERE table_name LIKE '%s'%s
+	WHERE table_name LIKE %s%s
GROUP BY table_catalog, table_schema
),
db_schemas AS (
@@ -1503,7 +1509,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
FROM information_schema.schemata
LEFT JOIN tables
USING (catalog_name, schema_name)
-	WHERE schema_name LIKE '%s'
+	WHERE schema_name LIKE %s
)

SELECT
@@ -1516,7 +1522,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
information_schema.schemata
LEFT JOIN db_schemas dbs
USING (catalog_name, schema_name)
-	WHERE catalog_name LIKE '%s'
+	WHERE catalog_name LIKE %s
GROUP BY catalog_name
)",
table_name_filter, table_type_condition, db_schema_filter, catalog_filter);
@@ -1551,7 +1557,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
xdbc_is_generatedcolumn: NULL::BOOLEAN,
}) table_columns
FROM information_schema.columns
-	WHERE column_name LIKE '%s'
+	WHERE column_name LIKE %s
GROUP BY table_catalog, table_schema, table_name
),
constraints AS (
@@ -1580,7 +1586,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
constraint_column_names,
list_filter(
constraint_column_names,
-	lambda name: name LIKE '%s'
+	lambda name: name LIKE %s
)
)
GROUP BY database_name, schema_name, table_name
@@ -1600,7 +1606,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
USING (table_catalog, table_schema, table_name)
LEFT JOIN constraints
USING (table_catalog, table_schema, table_name)
-	WHERE table_name LIKE '%s'%s
+	WHERE table_name LIKE %s%s
GROUP BY table_catalog, table_schema
),
db_schemas AS (
@@ -1611,7 +1617,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
FROM information_schema.schemata
LEFT JOIN tables
USING (catalog_name, schema_name)
-	WHERE schema_name LIKE '%s'
+	WHERE schema_name LIKE %s
)

SELECT
@@ -1624,7 +1630,7 @@ AdbcStatusCode ConnectionGetObjects(struct AdbcConnection *connection, int depth
information_schema.schemata
LEFT JOIN db_schemas dbs
USING (catalog_name, schema_name)
-	WHERE catalog_name LIKE '%s'
+	WHERE catalog_name LIKE %s
GROUP BY catalog_name
)",
column_name_filter, column_name_filter, table_name_filter,
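The reason the query templates above switch from LIKE '%s' to LIKE %s is that createFilter now returns an already-quoted SQL literal, or '%' when the caller passes no filter. A standalone sketch of that behaviour, not part of the diff, assuming duckdb::KeywordHelper::WriteQuoted wraps the input in the given quote character and doubles embedded quotes:

#include <string>

// Approximation of the new createFilter() helper, for illustration only.
std::string CreateFilterSketch(const char *input) {
	if (!input) {
		return "'%'"; // no filter supplied: match everything
	}
	std::string quoted = "'";
	for (const char *p = input; *p; ++p) {
		if (*p == '\'') {
			quoted += "''"; // escape an embedded single quote by doubling it
		} else {
			quoted += *p;
		}
	}
	quoted += "'";
	return quoted;
}

// CreateFilterSketch(nullptr)   -> "'%'"
// CreateFilterSketch("main")    -> "'main'"
// CreateFilterSketch("o'clock") -> "'o''clock'"

Quoting at the point where the filter string is built is what lets the templates interpolate it directly after LIKE without wrapping it in their own quotes.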
src/duckdb/src/common/arrow/schema_metadata.cpp (3 additions & 3 deletions)
@@ -97,21 +97,21 @@ unsafe_unique_array<char> ArrowSchemaMetadata::SerializeMetadata() const {
auto metadata_array_ptr = make_unsafe_uniq_array<char>(total_size);
auto metadata_ptr = metadata_array_ptr.get();
// 1. number of key-value pairs (int32)
-	const idx_t map_size = schema_metadata_map.size();
+	const int32_t map_size = static_cast<int32_t>(schema_metadata_map.size());
memcpy(metadata_ptr, &map_size, sizeof(int32_t));
metadata_ptr += sizeof(int32_t);
// Iterate through each key-value pair in the map
for (const auto &pair : schema_metadata_map) {
const std::string &key = pair.first;
-		idx_t key_size = key.size();
+		int32_t key_size = static_cast<int32_t>(key.size());
// Length of the key (int32)
memcpy(metadata_ptr, &key_size, sizeof(int32_t));
metadata_ptr += sizeof(int32_t);
// Key
memcpy(metadata_ptr, key.c_str(), key_size);
metadata_ptr += key_size;
const std::string &value = pair.second;
-		const idx_t value_size = value.size();
+		const int32_t value_size = static_cast<int32_t>(value.size());
// Length of the value (int32)
memcpy(metadata_ptr, &value_size, sizeof(int32_t));
metadata_ptr += sizeof(int32_t);
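The idx_t to int32_t change matters because each length field is written with memcpy(..., sizeof(int32_t)): copying from a genuine 32-bit value is well-defined everywhere, while copying the first four bytes of a 64-bit idx_t only happens to give the low half on little-endian hosts. A sketch of the byte layout being produced, not part of the diff (the real method precomputes total_size and writes into one make_unsafe_uniq_array buffer):

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Layout sketch: [int32 n_pairs] then, per pair,
// [int32 key_len][key bytes][int32 value_len][value bytes].
std::vector<char> SerializeOnePair(const std::string &key, const std::string &value) {
	std::vector<char> buf;
	auto append_int32 = [&buf](int32_t v) {
		const char *p = reinterpret_cast<const char *>(&v);
		buf.insert(buf.end(), p, p + sizeof(int32_t));
	};
	append_int32(1); // one key-value pair
	append_int32(static_cast<int32_t>(key.size()));
	buf.insert(buf.end(), key.begin(), key.end());
	append_int32(static_cast<int32_t>(value.size()));
	buf.insert(buf.end(), value.begin(), value.end());
	return buf;
}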
src/duckdb/src/common/enum_util.cpp (7 additions & 5 deletions)
@@ -3029,6 +3029,7 @@ const StringUtil::EnumStringLiteral *GetMetricTypeValues() {
{ static_cast<uint32_t>(MetricType::OPTIMIZER_CTE_INLINING), "OPTIMIZER_CTE_INLINING" },
{ static_cast<uint32_t>(MetricType::OPTIMIZER_COMMON_SUBPLAN), "OPTIMIZER_COMMON_SUBPLAN" },
{ static_cast<uint32_t>(MetricType::OPTIMIZER_JOIN_ELIMINATION), "OPTIMIZER_JOIN_ELIMINATION" },
+	{ static_cast<uint32_t>(MetricType::OPTIMIZER_COUNT_WINDOW_ELIMINATION), "OPTIMIZER_COUNT_WINDOW_ELIMINATION" },
{ static_cast<uint32_t>(MetricType::ALL_OPTIMIZERS), "ALL_OPTIMIZERS" },
{ static_cast<uint32_t>(MetricType::CUMULATIVE_OPTIMIZER_TIMING), "CUMULATIVE_OPTIMIZER_TIMING" },
{ static_cast<uint32_t>(MetricType::PHYSICAL_PLANNER), "PHYSICAL_PLANNER" },
@@ -3043,12 +3044,12 @@ const StringUtil::EnumStringLiteral *GetMetricTypeValues() {

template<>
const char* EnumUtil::ToChars<MetricType>(MetricType value) {
-	return StringUtil::EnumToString(GetMetricTypeValues(), 66, "MetricType", static_cast<uint32_t>(value));
+	return StringUtil::EnumToString(GetMetricTypeValues(), 67, "MetricType", static_cast<uint32_t>(value));
}

template<>
MetricType EnumUtil::FromString<MetricType>(const char *value) {
-	return static_cast<MetricType>(StringUtil::StringToEnum(GetMetricTypeValues(), 66, "MetricType", value));
+	return static_cast<MetricType>(StringUtil::StringToEnum(GetMetricTypeValues(), 67, "MetricType", value));
}

const StringUtil::EnumStringLiteral *GetMultiFileColumnMappingModeValues() {
@@ -3284,19 +3285,20 @@ const StringUtil::EnumStringLiteral *GetOptimizerTypeValues() {
{ static_cast<uint32_t>(OptimizerType::LATE_MATERIALIZATION), "LATE_MATERIALIZATION" },
{ static_cast<uint32_t>(OptimizerType::CTE_INLINING), "CTE_INLINING" },
{ static_cast<uint32_t>(OptimizerType::COMMON_SUBPLAN), "COMMON_SUBPLAN" },
-	{ static_cast<uint32_t>(OptimizerType::JOIN_ELIMINATION), "JOIN_ELIMINATION" }
+	{ static_cast<uint32_t>(OptimizerType::JOIN_ELIMINATION), "JOIN_ELIMINATION" },
+	{ static_cast<uint32_t>(OptimizerType::COUNT_WINDOW_ELIMINATION), "COUNT_WINDOW_ELIMINATION" }
};
return values;
}

template<>
const char* EnumUtil::ToChars<OptimizerType>(OptimizerType value) {
-	return StringUtil::EnumToString(GetOptimizerTypeValues(), 33, "OptimizerType", static_cast<uint32_t>(value));
+	return StringUtil::EnumToString(GetOptimizerTypeValues(), 34, "OptimizerType", static_cast<uint32_t>(value));
}

template<>
OptimizerType EnumUtil::FromString<OptimizerType>(const char *value) {
-	return static_cast<OptimizerType>(StringUtil::StringToEnum(GetOptimizerTypeValues(), 33, "OptimizerType", value));
+	return static_cast<OptimizerType>(StringUtil::StringToEnum(GetOptimizerTypeValues(), 34, "OptimizerType", value));
}

const StringUtil::EnumStringLiteral *GetOrderByNullTypeValues() {
src/duckdb/src/common/enums/optimizer_type.cpp (1 addition & 0 deletions)
@@ -45,6 +45,7 @@ static const DefaultOptimizerType internal_optimizer_types[] = {
{"cte_inlining", OptimizerType::CTE_INLINING},
{"common_subplan", OptimizerType::COMMON_SUBPLAN},
{"join_elimination", OptimizerType::JOIN_ELIMINATION},
{"count_window_elimination", OptimizerType::COUNT_WINDOW_ELIMINATION},
{nullptr, OptimizerType::INVALID}};

string OptimizerTypeToString(OptimizerType type) {
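Together, the enum_util.cpp and optimizer_type.cpp hunks register the new optimizer under both spellings: the upper-case literal used by EnumUtil and the lower-case name in internal_optimizer_types. The element counts passed to EnumToString/StringToEnum (67 and 34) have to match the literal tables, which is why they are bumped alongside the new entries. A small round-trip sketch, not part of the diff; the include paths are assumed:

#include "duckdb/common/enum_util.hpp"
#include "duckdb/common/enums/optimizer_type.hpp"

using namespace duckdb;

void OptimizerTypeRoundTrip() {
	// Enum -> string through the updated literal table.
	const char *name = EnumUtil::ToChars<OptimizerType>(OptimizerType::COUNT_WINDOW_ELIMINATION);
	// name == "COUNT_WINDOW_ELIMINATION"

	// String -> enum uses the same table, so both directions see the new entry.
	OptimizerType type = EnumUtil::FromString<OptimizerType>(name);
	(void)type;
}

The lower-case "count_window_elimination" string added to internal_optimizer_types is the spelling users would pass to settings that accept optimizer names, as with the existing entries in that table.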
src/duckdb/src/common/types/geometry.cpp (21 additions & 28 deletions)
@@ -16,7 +16,8 @@ class BlobWriter {
public:
template <class T>
void Write(const T &value) {
-		auto ptr = reinterpret_cast<const char *>(&value);
+		auto le_value = BSwapIfBE(value);
+		auto ptr = reinterpret_cast<const char *>(&le_value);
buffer.insert(buffer.end(), ptr, ptr + sizeof(T));
}

@@ -38,16 +39,12 @@ class BlobWriter {
if (reserved.offset + sizeof(T) > buffer.size()) {
throw InternalException("Write out of bounds in BinaryWriter");
}
-		auto ptr = reinterpret_cast<const char *>(&reserved.value);
+		auto le_value = BSwapIfBE(reserved.value);
+		auto ptr = reinterpret_cast<const char *>(&le_value);
// We've reserved 0 bytes, so we can safely memcpy
memcpy(buffer.data() + reserved.offset, ptr, sizeof(T));
}

-	void Write(const char *data, size_t size) {
-		D_ASSERT(data != nullptr);
-		buffer.insert(buffer.end(), data, data + size);
-	}
-
const vector<char> &GetBuffer() const {
return buffer;
}
@@ -70,18 +67,11 @@ class FixedSizeBlobWriter {
if (pos + sizeof(T) > end) {
throw InvalidInputException("Writing beyond end of binary data at position %zu", pos - beg);
}
-		memcpy(pos, &value, sizeof(T));
+		auto le_value = BSwapIfBE(value);
+		memcpy(pos, &le_value, sizeof(T));
pos += sizeof(T);
}

-	void Write(const char *data, size_t size) {
-		if (pos + size > end) {
-			throw InvalidInputException("Writing beyond end of binary data at position %zu", pos - beg);
-		}
-		memcpy(pos, data, size);
-		pos += size;
-	}
-
size_t GetPosition() const {
return static_cast<idx_t>(pos - beg);
}
@@ -112,17 +102,9 @@ class BlobReader {
throw InvalidInputException("Unexpected end of binary data at position %zu", pos - beg);
}
T value;
-		if (LE) {
-			memcpy(&value, pos, sizeof(T));
-			pos += sizeof(T);
-		} else {
-			char temp[sizeof(T)];
-			for (size_t i = 0; i < sizeof(T); ++i) {
-				temp[i] = pos[sizeof(T) - 1 - i];
-			}
-			memcpy(&value, temp, sizeof(T));
-			pos += sizeof(T);
-		}
+		memcpy(&value, pos, sizeof(T));
+		value = LE ? BSwapIfBE(value) : BSwapIfLE(value);
+		pos += sizeof(T);
return value;
}

@@ -1060,9 +1042,20 @@ static uint32_t ParseVerticesInternal(BlobReader &reader, GeometryExtent &extent

// Issue a single .Reserve() for all vertices, to minimize bounds checking overhead
const auto ptr = const_data_ptr_cast(reader.Reserve(vert_count * sizeof(VERTEX_TYPE)));

+#if DUCKDB_IS_BIG_ENDIAN
+	double be_buffer[sizeof(VERTEX_TYPE)];
+	auto be_ptr = reinterpret_cast<const_data_ptr_t>(be_buffer);
+#endif
for (uint32_t vert_idx = 0; vert_idx < vert_count; vert_idx++) {
+#if DUCKDB_IS_BIG_ENDIAN
+		auto vert_ofs = vert_idx * sizeof(VERTEX_TYPE);
+		for (idx_t i = 0; i < sizeof(VERTEX_TYPE) / sizeof(double); ++i) {
+			be_buffer[i] = LoadLE<double>(ptr + vert_ofs + i * sizeof(double));
+		}
+		VERTEX_TYPE vertex = Load<VERTEX_TYPE>(be_ptr);
+#else
VERTEX_TYPE vertex = Load<VERTEX_TYPE>(ptr + vert_idx * sizeof(VERTEX_TYPE));
+#endif
if (check_nan && vertex.AllNan()) {
continue;
}
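The thread running through the geometry.cpp hunks is that geometry blobs are kept little-endian in storage: the writers byte-swap before memcpy when the host is big-endian, and BlobReader does a plain memcpy and then swaps according to the blob's declared byte order (the LE template flag). The BSwapIfBE/BSwapIfLE helpers themselves are not part of this diff; a sketch of what such a helper does for one 8-byte value, under that assumption:

#include <cstdint>
#include <cstring>

// Illustration only: swap an 8-byte value on big-endian hosts, no-op otherwise.
// The real helpers presumably handle the other widths as well.
static double BSwapIfBESketch(double value) {
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	uint64_t bits;
	std::memcpy(&bits, &value, sizeof(bits));
	uint64_t swapped = 0;
	for (int i = 0; i < 8; ++i) {
		swapped = (swapped << 8) | (bits & 0xFFU); // earlier (lower) bytes end up higher
		bits >>= 8;
	}
	std::memcpy(&value, &swapped, sizeof(swapped));
#endif
	return value; // little-endian hosts pay nothing
}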
src/duckdb/src/common/types/hash.cpp (5 additions & 3 deletions)
@@ -84,7 +84,7 @@ hash_t HashBytes(const_data_ptr_t ptr, const idx_t len) noexcept {
// Hash/combine in blocks of 8 bytes
const auto remainder = len & 7U;
for (const auto end = ptr + len - remainder; ptr != end; ptr += 8U) {
-		h ^= Load<hash_t>(ptr);
+		h ^= LoadLE<hash_t>(ptr);
h *= 0xd6e8feb86659fd93U;
}

@@ -93,14 +93,15 @@ hash_t HashBytes(const_data_ptr_t ptr, const idx_t len) noexcept {
D_ASSERT(len >= 8);
// Load remaining (<8) bytes (with a Load instead of a memcpy)
const auto inv_rem = 8U - remainder;
-		const auto hr = Load<hash_t>(ptr - inv_rem) >> (inv_rem * 8U);
+		const auto hr = LoadLE<hash_t>(ptr - inv_rem) >> (inv_rem * 8U);

h ^= hr;
h *= 0xd6e8feb86659fd93U;
} else {
// Load remaining (<8) bytes (with a memcpy)
hash_t hr = 0;
memcpy(&hr, ptr, remainder);
+		hr = BSwapIfBE(hr);

h ^= hr;
h *= 0xd6e8feb86659fd93U;
@@ -122,14 +123,15 @@ hash_t Hash(string_t val) {

// Hash/combine the first 8-byte block
if (!val.Empty()) {
-		h ^= Load<hash_t>(const_data_ptr_cast(val.GetPrefix()));
+		h ^= LoadLE<hash_t>(const_data_ptr_cast(val.GetPrefix()));
h *= 0xd6e8feb86659fd93U;
}

// Load remaining 4 bytes
if (val.GetSize() > sizeof(hash_t)) {
hash_t hr = 0;
memcpy(&hr, const_data_ptr_cast(val.GetPrefix()) + sizeof(hash_t), 4U);
+		hr = BSwapIfBE(hr);

h ^= hr;
h *= 0xd6e8feb86659fd93U;
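The hash.cpp hunks swap Load for LoadLE so hash values do not depend on host byte order. LoadLE is not shown in this diff; the sketch below assumes it reads 8 bytes with byte 0 as the least significant, which is also what makes the existing tail trick (load the 8 bytes ending at the buffer's end, then shift right) isolate exactly the not-yet-hashed remainder bytes on any host:

#include <cstdint>

// Illustration only: a little-endian 8-byte load plus the tail trick from HashBytes.
static uint64_t LoadLESketch(const uint8_t *p) {
	uint64_t v = 0;
	for (int i = 7; i >= 0; --i) {
		v = (v << 8) | p[i]; // p[0] ends up in the least significant byte
	}
	return v;
}

// Assumes len >= 8 and (len & 7) != 0, as on the branch in HashBytes.
static uint64_t LoadTailSketch(const uint8_t *ptr, uint64_t len) {
	const uint64_t remainder = len & 7U;
	const uint64_t inv_rem = 8U - remainder;
	// The 8 bytes ending at ptr + len: the low inv_rem bytes were already hashed,
	// so the shift drops them and leaves the remainder bytes in the low positions.
	return LoadLESketch(ptr + len - 8U) >> (inv_rem * 8U);
}

With a big-endian load the shift would keep the wrong end of that window (bytes that were already hashed), so the two branches of HashBytes would disagree; hence the switch away from the byte-order-sensitive Load.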