fix token consumption so we don't get random trailing data at the end

This commit is contained in:
2026-02-16 06:57:18 -05:00
parent f0d3eca5cb
commit 9518eb255e
2 changed files with 21 additions and 1 deletions

View File

@@ -184,7 +184,17 @@ parse_filter_expression :: proc(
) -> (node: ^Filter_Node, ok: bool) {
t := tokenizer_init(expression)
node, ok = parse_or_expr(&t, attribute_names, attribute_values)
return
if !ok {
return nil, false
}
// Verify all tokens were consumed (no trailing garbage)
if trailing := tokenizer_next(&t); trailing != nil {
filter_node_destroy(node)
return nil, false
}
return node, true
}
parse_or_expr :: proc(