fix token consumption so we don't get random data at the end
This commit is contained in:
@@ -184,7 +184,17 @@ parse_filter_expression :: proc(
|
||||
) -> (node: ^Filter_Node, ok: bool) {
|
||||
t := tokenizer_init(expression)
|
||||
node, ok = parse_or_expr(&t, attribute_names, attribute_values)
|
||||
return
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// Verify all tokens were consumed (no trailing garbage)
|
||||
if trailing := tokenizer_next(&t); trailing != nil {
|
||||
filter_node_destroy(node)
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return node, true
|
||||
}
|
||||
|
||||
parse_or_expr :: proc(
|
||||
|
||||
Reference in New Issue
Block a user