fix token consumption so we don't get random data at the end

This commit is contained in:
2026-02-16 06:57:18 -05:00
parent f0d3eca5cb
commit 9518eb255e
2 changed files with 21 additions and 1 deletions

View File

@@ -203,6 +203,16 @@ parse_key_condition_expression :: proc(
sk_condition = skc
}
// Verify all tokens were consumed (no trailing garbage)
if trailing := tokenizer_next(&t); trailing != nil {
delete(pk_name)
attr_value_destroy(&pk_value)
if skc, has_skc := sk_condition.?; has_skc {
skc_copy := skc
sort_key_condition_destroy(&skc_copy)
}
return
}
kc = Key_Condition{
pk_name = pk_name,

View File

@@ -184,7 +184,17 @@ parse_filter_expression :: proc(
) -> (node: ^Filter_Node, ok: bool) {
t := tokenizer_init(expression)
node, ok = parse_or_expr(&t, attribute_names, attribute_values)
return
if !ok {
return nil, false
}
// Verify all tokens were consumed (no trailing garbage)
if trailing := tokenizer_next(&t); trailing != nil {
filter_node_destroy(node)
return nil, false
}
return node, true
}
parse_or_expr :: proc(