2026-02-15 15:04:43 -05:00
|
|
|
// DynamoDB Expression Parser
|
|
|
|
|
// Parses KeyConditionExpression with ExpressionAttributeNames and ExpressionAttributeValues
|
|
|
|
|
// Supports: pk = :pk, pk = :pk AND sk > :sk, begins_with(sk, :prefix), BETWEEN, etc.
|
|
|
|
|
package dynamodb
|
|
|
|
|
|
|
|
|
|
import "core:encoding/json"
|
|
|
|
|
import "core:strings"
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Sort Key Condition Operators
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Comparison operators permitted on the sort key of a KeyConditionExpression.
// (The partition key itself only ever uses equality — see
// parse_key_condition_expression.)
Sort_Key_Operator :: enum {
	EQ, // =
	LT, // <
	LE, // <=
	GT, // >
	GE, // >=
	BETWEEN, // BETWEEN x AND y
	BEGINS_WITH, // begins_with(sk, prefix)
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Parsed Structures
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// One parsed sort-key clause of a KeyConditionExpression.
// Fields holding heap memory are owned by this struct; release them with
// sort_key_condition_destroy.
Sort_Key_Condition :: struct {
	sk_name: string, // cloned attribute name (owned)
	operator: Sort_Key_Operator,
	value: Attribute_Value, // deep copy of the resolved :placeholder (owned)
	value2: Maybe(Attribute_Value), // second operand — only set for BETWEEN (owned)
}
|
|
|
|
|
|
|
|
|
|
// Releases all heap memory owned by a Sort_Key_Condition: the cloned
// sort-key name, the primary value, and (for BETWEEN) the second value.
sort_key_condition_destroy :: proc(skc: ^Sort_Key_Condition) {
	delete(skc.sk_name)
	attr_value_destroy(&skc.value)

	second, present := skc.value2.?
	if present {
		// `second` is a local copy of the Maybe payload; destroying the
		// copy frees the same underlying heap memory.
		attr_value_destroy(&second)
	}
}
|
|
|
|
|
|
|
|
|
|
// A fully parsed KeyConditionExpression: mandatory partition-key equality
// plus an optional sort-key clause. Owns its memory; release with
// key_condition_destroy.
Key_Condition :: struct {
	pk_name: string, // cloned attribute name (owned)
	pk_value: Attribute_Value, // deep copy of the resolved :placeholder (owned)
	sk_condition: Maybe(Sort_Key_Condition), // optional "AND ..." clause (owned)
}
|
|
|
|
|
|
|
|
|
|
// Frees everything a Key_Condition owns: the cloned partition-key name,
// the partition-key value, and the optional sort-key condition.
key_condition_destroy :: proc(kc: ^Key_Condition) {
	delete(kc.pk_name)
	attr_value_destroy(&kc.pk_value)

	sk, has_sk := kc.sk_condition.?
	if has_sk {
		// `sk` is a local copy of the Maybe payload; destroying the copy
		// frees the same underlying heap memory.
		sort_key_condition_destroy(&sk)
	}
}
|
|
|
|
|
|
|
|
|
|
// Get the raw partition key value bytes for building storage keys.
//
// Returns (bytes, true) for String, DDB_Number, and Binary partition keys,
// and (nil, false) for any other attribute type.
//
// NOTE(review): ownership of the returned slice differs by case — for
// String/Binary it aliases the memory inside kc.pk_value, while the
// DDB_Number case returns whatever encode_ddb_number_for_sort produces
// (presumably a fresh allocation). Callers should confirm which applies
// before freeing. TODO confirm.
key_condition_get_pk_bytes :: proc(kc: ^Key_Condition) -> ([]byte, bool) {
	#partial switch v in kc.pk_value {
	case String:
		return transmute([]byte)string(v), true
	case DDB_Number:
		// Use canonical encoding for numbers in keys!
		return encode_ddb_number_for_sort(v), true
	case Binary:
		return transmute([]byte)string(v), true
	}
	return nil, false
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Tokenizer
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Cursor-style tokenizer over an expression string.
// `input` is borrowed from the caller and never freed here;
// `pos` is the current byte offset into it.
Tokenizer :: struct {
	input: string,
	pos: int,
}
|
|
|
|
|
|
|
|
|
|
// Creates a tokenizer positioned at the start of `input`.
// The tokenizer borrows `input`; it does not copy it.
tokenizer_init :: proc(input: string) -> Tokenizer {
	t: Tokenizer
	t.input = input
	t.pos = 0
	return t
}
|
|
|
|
|
|
|
|
|
|
// Returns the next token in the expression, or nil when input is exhausted.
//
// Token classes, tried in order:
//   - single-character tokens:      ( ) ,
//   - two-character operators:      <= >= <>
//   - single-character operators:   = < >
//   - identifier/keyword runs of is_ident_char bytes
//     (covers attribute names, #aliases, :placeholders, and numbers)
//
// Unrecognized characters are skipped. Returned tokens are slices of
// t.input — they borrow, never allocate.
tokenizer_next :: proc(t: ^Tokenizer) -> Maybe(string) {
	// FIX: loop instead of tail-recursing on unknown characters, so a long
	// run of garbage input cannot grow the call stack.
	for {
		// Skip whitespace.
		for t.pos < len(t.input) && is_whitespace(t.input[t.pos]) {
			t.pos += 1
		}

		if t.pos >= len(t.input) {
			return nil
		}

		start := t.pos
		c := t.input[t.pos]

		// Single-character tokens.
		if c == '(' || c == ')' || c == ',' {
			t.pos += 1
			return t.input[start:t.pos]
		}

		// Two-character operators (checked before '<' / '>' alone).
		if t.pos + 1 < len(t.input) {
			two := t.input[t.pos:t.pos + 2]
			if two == "<=" || two == ">=" || two == "<>" {
				t.pos += 2
				return two
			}
		}

		// Single-character operators.
		if c == '=' || c == '<' || c == '>' {
			t.pos += 1
			return t.input[start:t.pos]
		}

		// Identifier or keyword (includes :placeholder and #name).
		for t.pos < len(t.input) && is_ident_char(t.input[t.pos]) {
			t.pos += 1
		}

		if t.pos > start {
			return t.input[start:t.pos]
		}

		// Unknown character: skip it and try again.
		t.pos += 1
	}
}
|
|
|
|
|
|
|
|
|
|
@(private = "file")
|
|
|
|
|
is_whitespace :: proc(c: byte) -> bool {
|
|
|
|
|
return c == ' ' || c == '\t' || c == '\n' || c == '\r'
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@(private = "file")
|
|
|
|
|
is_ident_char :: proc(c: byte) -> bool {
|
|
|
|
|
return (c >= 'a' && c <= 'z') ||
|
|
|
|
|
(c >= 'A' && c <= 'Z') ||
|
|
|
|
|
(c >= '0' && c <= '9') ||
|
|
|
|
|
c == '_' || c == ':' || c == '#' || c == '-' || c == '.'
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
// Helper: convert Maybe(string) tokens into (string, bool) so or_return works.
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
|
2026-02-15 23:38:48 -05:00
|
|
|
// Package-visible: used by update.odin and filter.odin.
// Adapter around tokenizer_next that converts its Maybe(string) result into
// the (string, bool) shape, so callers can use `or_return`.
next_token :: proc(t: ^Tokenizer) -> (tok: string, ok: bool) {
	tok, ok = tokenizer_next(t).?
	return
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Expression Parsing
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Parses a KeyConditionExpression of the form:
//
//   <pk> = :pkval [AND <sort-key condition>]
//
// <pk> may be a "#alias" resolved through attribute_names; ":pkval" is looked
// up in attribute_values (a deep copy is stored). Any tokens left over after
// the expression are an error.
//
// On success the returned Key_Condition owns all of its memory; release it
// with key_condition_destroy. On failure everything allocated so far is
// freed before returning ok=false.
parse_key_condition_expression :: proc(
	expression: string,
	attribute_names: Maybe(map[string]string),
	attribute_values: map[string]Attribute_Value,
) -> (kc: Key_Condition, ok: bool) {
	t := tokenizer_init(expression)

	pk_name_token := next_token(&t) or_return
	pk_name_unowned := resolve_attribute_name(pk_name_token, attribute_names) or_return
	pk_name := strings.clone(pk_name_unowned) // Clone for safe storage

	// FIX: once pk_name is cloned, a bare `or_return` would leak it on a
	// missing token. Use explicit checks + cleanup from here on.
	eq_token, eq_tok_ok := next_token(&t)
	if !eq_tok_ok || eq_token != "=" {
		delete(pk_name) // free on error
		return
	}

	pk_value_token, pk_tok_ok := next_token(&t)
	if !pk_tok_ok {
		delete(pk_name) // free on error
		return
	}
	pk_value, pk_ok := resolve_attribute_value(pk_value_token, attribute_values)
	if !pk_ok {
		delete(pk_name) // free on error
		return
	}

	sk_condition: Maybe(Sort_Key_Condition) = nil

	// Optional "AND <sort-key condition>"
	if and_token, has_and := tokenizer_next(&t).?; has_and {
		if !strings.equal_fold(and_token, "AND") {
			delete(pk_name) // free on error
			attr_value_destroy(&pk_value)
			return
		}

		skc, skc_ok := parse_sort_key_condition(&t, attribute_names, attribute_values)
		if !skc_ok {
			delete(pk_name) // free on error
			attr_value_destroy(&pk_value)
			return
		}
		sk_condition = skc
	}

	// Verify all tokens were consumed (no trailing garbage).
	if trailing := tokenizer_next(&t); trailing != nil {
		delete(pk_name)
		attr_value_destroy(&pk_value)
		if skc, has_skc := sk_condition.?; has_skc {
			skc_copy := skc
			sort_key_condition_destroy(&skc_copy)
		}
		return
	}

	kc = Key_Condition{
		pk_name = pk_name,
		pk_value = pk_value,
		sk_condition = sk_condition,
	}
	ok = true
	return
}
|
|
|
|
|
|
|
|
|
|
@(private = "file")
|
|
|
|
|
parse_sort_key_condition :: proc(
|
|
|
|
|
t: ^Tokenizer,
|
|
|
|
|
attribute_names: Maybe(map[string]string),
|
|
|
|
|
attribute_values: map[string]Attribute_Value,
|
|
|
|
|
) -> (skc: Sort_Key_Condition, ok: bool) {
|
|
|
|
|
first_token := next_token(t) or_return
|
|
|
|
|
|
|
|
|
|
if strings.equal_fold(first_token, "begins_with") {
|
|
|
|
|
skc, ok = parse_begins_with(t, attribute_names, attribute_values)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-16 03:11:11 -05:00
|
|
|
sk_name_unowned := resolve_attribute_name(first_token, attribute_names) or_return
|
|
|
|
|
sk_name := strings.clone(sk_name_unowned) // Clone for safe storage
|
2026-02-15 15:04:43 -05:00
|
|
|
|
|
|
|
|
op_token := next_token(t) or_return
|
|
|
|
|
operator, op_ok := parse_operator(op_token)
|
|
|
|
|
if !op_ok {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
value_token := next_token(t) or_return
|
|
|
|
|
value, val_ok := resolve_attribute_value(value_token, attribute_values)
|
|
|
|
|
if !val_ok {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
value2: Maybe(Attribute_Value) = nil
|
|
|
|
|
if operator == .BETWEEN {
|
|
|
|
|
// IMPORTANT: after allocating `value`, do NOT use `or_return` without cleanup.
|
|
|
|
|
and_token, tok_ok := next_token(t)
|
|
|
|
|
if !tok_ok || !strings.equal_fold(and_token, "AND") {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
attr_value_destroy(&value)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
value2_token, tok2_ok := next_token(t)
|
|
|
|
|
if !tok2_ok {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
attr_value_destroy(&value)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
v2, v2_ok := resolve_attribute_value(value2_token, attribute_values)
|
|
|
|
|
if !v2_ok {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
attr_value_destroy(&value)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
value2 = v2
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
skc = Sort_Key_Condition{
|
|
|
|
|
sk_name = sk_name,
|
|
|
|
|
operator = operator,
|
|
|
|
|
value = value,
|
|
|
|
|
value2 = value2,
|
|
|
|
|
}
|
|
|
|
|
ok = true
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@(private = "file")
|
|
|
|
|
parse_begins_with :: proc(
|
|
|
|
|
t: ^Tokenizer,
|
|
|
|
|
attribute_names: Maybe(map[string]string),
|
|
|
|
|
attribute_values: map[string]Attribute_Value,
|
|
|
|
|
) -> (skc: Sort_Key_Condition, ok: bool) {
|
|
|
|
|
lparen := next_token(t) or_return
|
|
|
|
|
if lparen != "(" {
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
sk_name_token := next_token(t) or_return
|
2026-02-16 03:11:11 -05:00
|
|
|
sk_name_unowned := resolve_attribute_name(sk_name_token, attribute_names) or_return
|
|
|
|
|
sk_name := strings.clone(sk_name_unowned) // Clone for safe storage
|
2026-02-15 15:04:43 -05:00
|
|
|
|
|
|
|
|
comma := next_token(t) or_return
|
|
|
|
|
if comma != "," {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
value_token := next_token(t) or_return
|
|
|
|
|
value, val_ok := resolve_attribute_value(value_token, attribute_values)
|
|
|
|
|
if !val_ok {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// after allocating `value`, avoid `or_return` so we can clean up
|
|
|
|
|
rparen, tok_ok := next_token(t)
|
|
|
|
|
if !tok_ok || rparen != ")" {
|
2026-02-16 03:24:54 -05:00
|
|
|
delete(sk_name) // free on error
|
2026-02-15 15:04:43 -05:00
|
|
|
attr_value_destroy(&value)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
skc = Sort_Key_Condition{
|
|
|
|
|
sk_name = sk_name,
|
|
|
|
|
operator = .BEGINS_WITH,
|
|
|
|
|
value = value,
|
|
|
|
|
value2 = nil,
|
|
|
|
|
}
|
|
|
|
|
ok = true
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@(private = "file")
|
|
|
|
|
parse_operator :: proc(token: string) -> (Sort_Key_Operator, bool) {
|
|
|
|
|
if token == "=" do return .EQ, true
|
|
|
|
|
if token == "<" do return .LT, true
|
|
|
|
|
if token == "<=" do return .LE, true
|
|
|
|
|
if token == ">" do return .GT, true
|
|
|
|
|
if token == ">=" do return .GE, true
|
|
|
|
|
if strings.equal_fold(token, "BETWEEN") do return .BETWEEN, true
|
|
|
|
|
return .EQ, false
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-15 23:38:48 -05:00
|
|
|
// Package-visible: used by update.odin and filter.odin.
// Resolves a possibly-aliased attribute name. Tokens beginning with '#'
// must be present in the ExpressionAttributeNames map; anything else is
// already a literal name and is returned as-is. The returned string is
// borrowed — clone it before storing.
resolve_attribute_name :: proc(token: string, names: Maybe(map[string]string)) -> (string, bool) {
	if len(token) == 0 || token[0] != '#' {
		// Not an alias: the token itself is the attribute name.
		return token, true
	}

	n, has_names := names.?
	if !has_names {
		return "", false
	}

	resolved, found := n[token]
	if !found {
		return "", false
	}
	return resolved, true
}
|
|
|
|
|
|
2026-02-15 23:38:48 -05:00
|
|
|
// Package-visible: used by update.odin and filter.odin.
// Looks up a ":placeholder" token in ExpressionAttributeValues and returns
// an owned deep copy of the match (free with attr_value_destroy). Tokens
// that do not start with ':' or are absent from the map fail.
resolve_attribute_value :: proc(
	token: string,
	values: map[string]Attribute_Value,
) -> (Attribute_Value, bool) {
	if len(token) == 0 || token[0] != ':' {
		return nil, false
	}

	original, found := values[token]
	if !found {
		return nil, false
	}
	return attr_value_deep_copy(original), true
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Request Parsing Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Extracts the ExpressionAttributeNames map from a raw JSON request body.
// Returns nil when the body is not valid JSON, is not an object, lacks the
// field, or the field is not an object; non-string entries are skipped.
// The returned map and its cloned keys/values are owned by the caller.
parse_expression_attribute_names :: proc(request_body: []byte) -> Maybe(map[string]string) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil do return nil
	defer json.destroy_value(data)

	root, is_obj := data.(json.Object)
	if !is_obj do return nil

	names_val, has_field := root["ExpressionAttributeNames"]
	if !has_field do return nil

	names_obj, is_names_obj := names_val.(json.Object)
	if !is_names_obj do return nil

	result := make(map[string]string)
	for key, val in names_obj {
		if str, is_str := val.(json.String); is_str {
			result[strings.clone(key)] = strings.clone(string(str))
		}
	}
	return result
}
|
|
|
|
|
|
|
|
|
|
// Extracts ExpressionAttributeValues from a raw JSON request body.
//
// Returns (values, true) on success — including an empty map when the field
// is simply absent — and (nil, false) for malformed JSON, a non-object body,
// a non-object field, or an unparseable attribute value. The returned map,
// its cloned keys, and its parsed values are owned by the caller.
//
// FIX: error paths previously returned make(map[...]) — a fresh allocation
// the caller had to free even though ok=false. They now return a nil map,
// which allocates nothing and is safe to iterate and delete in Odin.
parse_expression_attribute_values :: proc(request_body: []byte) -> (map[string]Attribute_Value, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil, false
	}
	// NOTE(review): data was parsed with the temp allocator, but destroy_value
	// defaults to context.allocator — confirm the allocators match.
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return nil, false
	}

	values_val, found := root["ExpressionAttributeValues"]
	if !found {
		// Absent field is not an error: callers get an empty (owned) map.
		return make(map[string]Attribute_Value), true
	}

	values_obj, values_ok := values_val.(json.Object)
	if !values_ok {
		return nil, false
	}

	result := make(map[string]Attribute_Value)
	for key, val in values_obj {
		attr, attr_ok := parse_attribute_value(val)
		if !attr_ok {
			// Clean up already-parsed values before returning the error.
			for k, &v in result {
				attr_value_destroy(&v)
				delete(k)
			}
			delete(result)
			return nil, false
		}
		result[strings.clone(key)] = attr
	}

	return result, true
}
|
|
|
|
|
|
2026-02-15 20:57:16 -05:00
|
|
|
// ============================================================================
|
|
|
|
|
// FIX: Use JSON object lookup instead of fragile string scanning.
|
|
|
|
|
// This handles whitespace, field ordering, and escape sequences correctly.
|
|
|
|
|
// ============================================================================
|
2026-02-15 15:04:43 -05:00
|
|
|
// Pulls the KeyConditionExpression string out of a raw JSON request body
// via a proper JSON object lookup (handles whitespace, field ordering, and
// escape sequences). The returned string is a heap clone owned by the caller.
parse_key_condition_expression_string :: proc(request_body: []byte) -> (expr: string, ok: bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil do return
	defer json.destroy_value(data)

	root, root_ok := data.(json.Object)
	if !root_ok do return

	kce_val, found := root["KeyConditionExpression"]
	if !found do return

	kce_str, str_ok := kce_val.(json.String)
	if !str_ok do return

	return strings.clone(string(kce_str)), true
}
|
|
|
|
|
|
|
|
|
|
// Convenience: parse a complete Query key condition from a request body.
//
// Extracts KeyConditionExpression, ExpressionAttributeNames, and
// ExpressionAttributeValues from the JSON body, then parses them into a
// Key_Condition. All intermediate allocations (expression string, name map,
// value map) are owned here and released via defer; the returned
// Key_Condition holds its own clones/deep copies, so it outlives them
// (free it with key_condition_destroy).
parse_query_key_condition :: proc(request_body: []byte) -> (kc: Key_Condition, ok: bool) {
	expression := parse_key_condition_expression_string(request_body) or_return
	defer delete(expression)

	attr_names := parse_expression_attribute_names(request_body)
	defer {
		// Free the name map plus every cloned key/value inside it.
		if names, has_names := attr_names.?; has_names {
			for k, v in names {
				delete(k)
				delete(v)
			}
			names_copy := names
			delete(names_copy)
		}
	}

	attr_values, vals_ok := parse_expression_attribute_values(request_body)
	if !vals_ok {
		// FIX: free the map returned on failure — previously it leaked.
		// (delete on a nil or empty map is a safe no-op in Odin.)
		delete(attr_values)
		return
	}
	defer {
		// Free the value map plus every cloned key and parsed value inside it.
		for k, v in attr_values {
			delete(k)
			v_copy := v
			attr_value_destroy(&v_copy)
		}
		delete(attr_values)
	}

	kc, ok = parse_key_condition_expression(expression, attr_names, attr_values)
	return
}
|