2026-02-15 12:13:46 -05:00
|
|
|
// DynamoDB JSON parsing and serialization
|
|
|
|
|
// Pure functions for converting between DynamoDB JSON format and internal types
|
|
|
|
|
package dynamodb
|
|
|
|
|
|
|
|
|
|
import "core:encoding/json"
|
|
|
|
|
import "core:fmt"
|
|
|
|
|
import "core:slice"
|
|
|
|
|
import "core:strings"
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Parsing (JSON → Types)
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Parse DynamoDB JSON format into an Item
|
|
|
|
|
// Caller owns returned Item
|
|
|
|
|
parse_item :: proc(json_bytes: []byte) -> (Item, bool) {
	// Parse the raw bytes into a transient JSON tree; parse_item_from_value
	// clones everything it keeps, so the tree is destroyed before returning.
	parsed, err := json.parse(json_bytes, allocator = context.allocator)
	if err != nil do return {}, false
	defer json.destroy_value(parsed)
	return parse_item_from_value(parsed)
}
|
|
|
|
|
|
|
|
|
|
// Parse an Item from an already-parsed JSON Value
|
|
|
|
|
// More efficient when you already have a Value (e.g., from request body parsing)
|
|
|
|
|
parse_item_from_value :: proc(value: json.Value) -> (Item, bool) {
	// The root of a DynamoDB item is a JSON object mapping attribute
	// names to typed attribute values.
	obj, is_obj := value.(json.Object)
	if !is_obj do return {}, false

	item := make(Item)
	for key, val in obj {
		name := strings.clone(key)

		parsed, parsed_ok := parse_attribute_value(val)
		if !parsed_ok {
			// Unwind: free this key plus every entry inserted so far.
			delete(name)
			for k, v in item {
				delete(k)
				tmp := v
				attr_value_destroy(&tmp)
			}
			delete(item)
			return {}, false
		}

		item[name] = parsed
	}

	return item, true
}
|
|
|
|
|
|
|
|
|
|
// Parse a single DynamoDB AttributeValue from JSON
// Format: {"S": "value"}, {"N": "123"}, {"M": {...}}, etc.
// On success the returned Attribute_Value owns all of its memory: every
// string is cloned out of the transient json.Value tree, so the caller may
// destroy the input tree immediately afterwards.
parse_attribute_value :: proc(value: json.Value) -> (Attribute_Value, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return nil, false
	}

	// DynamoDB attribute must have exactly one key (the type indicator)
	if len(obj) != 1 {
		return nil, false
	}

	// Get the single key-value pair (loop body runs at most once: len == 1)
	for type_name, type_value in obj {
		// String: {"S": "..."}
		if type_name == "S" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return String(strings.clone(string(str))), true
		}

		// Number (stored as string): {"N": "123.45"}
		if type_name == "N" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}

			// Parse into DDB_Number (also validates the numeric format)
			ddb_num, num_ok := parse_ddb_number(string(str))
			if !num_ok {
				return nil, false
			}

			// Clone the string fields since they're slices of the input
			owned_num := clone_ddb_number(ddb_num)
			return owned_num, true
		}

		// Binary (base64 string): {"B": "..."} — kept base64-encoded, not decoded here
		if type_name == "B" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Binary(strings.clone(string(str))), true
		}

		// Boolean: {"BOOL": true}
		if type_name == "BOOL" {
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Bool(b), true
		}

		// Null: {"NULL": true} — the boolean payload is preserved as-is
		if type_name == "NULL" {
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Null(b), true
		}

		// String Set: {"SS": ["a", "b", ...]}
		if type_name == "SS" {
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			strings_arr := make([]string, len(arr))

			for item, i in arr {
				str, str_ok := item.(json.String)
				if !str_ok {
					// Cleanup on error: free the clones made so far
					for j in 0..<i {
						delete(strings_arr[j])
					}
					delete(strings_arr)
					return nil, false
				}
				strings_arr[i] = strings.clone(string(str))
			}

			return String_Set(strings_arr), true
		}

		// Number Set: {"NS": ["1", "2", ...]}
		if type_name == "NS" {
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			numbers_arr := make([]DDB_Number, len(arr))

			for item, i in arr {
				str, str_ok := item.(json.String)
				if !str_ok {
					// Cleanup on error
					for j in 0..<i {
						// Clean up DDB_Numbers
						delete(numbers_arr[j].integer_part)
						delete(numbers_arr[j].fractional_part)
					}
					delete(numbers_arr)
					return nil, false
				}

				// Parse into DDB_Number
				ddb_num, num_ok := parse_ddb_number(string(str))
				if !num_ok {
					// Cleanup on error
					for j in 0..<i {
						delete(numbers_arr[j].integer_part)
						delete(numbers_arr[j].fractional_part)
					}
					delete(numbers_arr)
					return nil, false
				}

				// Clone and store
				numbers_arr[i] = clone_ddb_number(ddb_num)
			}

			return DDB_Number_Set(numbers_arr), true
		}

		// Binary Set: {"BS": ["base64", ...]}
		if type_name == "BS" {
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			binaries_arr := make([]string, len(arr))

			for item, i in arr {
				str, str_ok := item.(json.String)
				if !str_ok {
					// Cleanup on error
					for j in 0..<i {
						delete(binaries_arr[j])
					}
					delete(binaries_arr)
					return nil, false
				}
				binaries_arr[i] = strings.clone(string(str))
			}

			return Binary_Set(binaries_arr), true
		}

		// List: {"L": [attr, attr, ...]} — elements parsed recursively
		if type_name == "L" {
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			list := make([]Attribute_Value, len(arr))

			for item, i in arr {
				val, val_ok := parse_attribute_value(item)
				if !val_ok {
					// Cleanup on error: destroy the elements parsed so far
					for j in 0..<i {
						item_copy := list[j]
						attr_value_destroy(&item_copy)
					}
					delete(list)
					return nil, false
				}
				list[i] = val
			}

			return List(list), true
		}

		// Map: {"M": {"key": attr, ...}} — values parsed recursively
		if type_name == "M" {
			map_obj, map_ok := type_value.(json.Object)
			if !map_ok {
				return nil, false
			}

			attr_map := make(map[string]Attribute_Value)

			for map_key, map_val in map_obj {
				key := strings.clone(map_key)

				val, val_ok := parse_attribute_value(map_val)
				if !val_ok {
					// Cleanup on error: free this key plus everything inserted so far
					delete(key)
					for k, v in attr_map {
						delete(k)
						v_copy := v
						attr_value_destroy(&v_copy)
					}
					delete(attr_map)
					return nil, false
				}

				attr_map[key] = val
			}

			return Map(attr_map), true
		}
	}

	// Unknown type indicator
	return nil, false
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Serialization (Types → JSON)
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Serialize an Item to canonical DynamoDB JSON format
|
|
|
|
|
// Keys are sorted alphabetically for deterministic output
|
|
|
|
|
serialize_item :: proc(item: Item) -> string {
	// Build into a scratch buffer, then hand back an owned copy.
	sb := strings.builder_make()
	defer strings.builder_destroy(&sb)
	serialize_item_to_builder(&sb, item)
	return strings.clone(strings.to_string(sb))
}
|
|
|
|
|
|
|
|
|
|
// Serialize an Item to a strings.Builder with deterministic ordering
|
|
|
|
|
serialize_item_to_builder :: proc(b: ^strings.Builder, item: Item) {
	// Emit attributes in sorted key order so output is deterministic
	// regardless of map iteration order.
	keys := make([dynamic]string, context.temp_allocator)
	defer delete(keys)
	for key in item do append(&keys, key)
	slice.sort_by(keys[:], proc(x, y: string) -> bool {
		return x < y
	})

	// NOTE(review): attribute names are written with %s and not JSON-escaped;
	// assumes they contain no quotes/backslashes — confirm upstream validation.
	strings.write_string(b, "{")
	for key, i in keys {
		if i > 0 do strings.write_string(b, ",")
		fmt.sbprintf(b, `"%s":`, key)
		serialize_attribute_value(b, item[key])
	}
	strings.write_string(b, "}")
}
|
|
|
|
|
|
|
|
|
|
// Serialize an AttributeValue to DynamoDB JSON format.
// Output is canonical: no whitespace, and nested map keys sorted.
// NOTE(review): string payloads (S/B/SS/BS and key names) are written with %s
// and are NOT JSON-escaped; this assumes values contain no quotes, backslashes,
// or control characters — confirm upstream validation or add an escape helper.
serialize_attribute_value :: proc(b: ^strings.Builder, attr: Attribute_Value) {
	switch v in attr {
	case String:
		fmt.sbprintf(b, `{"S":"%s"}`, string(v))

	case DDB_Number:
		// Numbers are re-rendered from the normalized DDB_Number form
		num_str := format_ddb_number(v)
		fmt.sbprintf(b, `{"N":"%s"}`, num_str)

	case Binary:
		// Binary payload is already base64 text; emitted verbatim
		fmt.sbprintf(b, `{"B":"%s"}`, string(v))

	case Bool:
		fmt.sbprintf(b, `{"BOOL":%v}`, bool(v))

	case Null:
		// Always emits true; the stored Null flag itself is not consulted here
		strings.write_string(b, `{"NULL":true}`)

	case String_Set:
		strings.write_string(b, `{"SS":[`)
		for s, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s"`, s)
		}
		strings.write_string(b, "]}")

	case DDB_Number_Set:
		strings.write_string(b, `{"NS":[`)
		for num, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			num_str := format_ddb_number(num)
			fmt.sbprintf(b, `"%s"`, num_str)
		}
		strings.write_string(b, "]}")

	case Binary_Set:
		strings.write_string(b, `{"BS":[`)
		for bin, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s"`, bin)
		}
		strings.write_string(b, "]}")

	case List:
		// List elements serialize recursively in their stored order
		strings.write_string(b, `{"L":[`)
		for item, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			serialize_attribute_value(b, item)
		}
		strings.write_string(b, "]}")

	case Map:
		strings.write_string(b, `{"M":{`)

		// Collect and sort keys for deterministic output
		keys := make([dynamic]string, context.temp_allocator)
		for key in v {
			append(&keys, key)
		}

		slice.sort_by(keys[:], proc(a, b: string) -> bool {
			return a < b
		})

		for key, i in keys {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s":`, key)
			value := v[key]
			serialize_attribute_value(b, value)
		}

		strings.write_string(b, "}}")
	}
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Request Parsing Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Extract table name from request body.
// NOTE(review): the returned string is not cloned — it aliases the JSON tree
// parsed with context.temp_allocator and destroyed via the defer below. This
// appears to rely on temp allocations staying readable until the temp arena
// is reset; confirm that callers do not retain the string past the current
// request, or clone at the call site.
parse_table_name :: proc(request_body: []byte) -> (string, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "", false
	}
	defer json.destroy_value(data)

	// Body must be a JSON object
	root, ok := data.(json.Object)
	if !ok {
		return "", false
	}

	// "TableName" field is required
	table_name_val, found := root["TableName"]
	if !found {
		return "", false
	}

	// ...and must be a JSON string
	table_name_str, str_ok := table_name_val.(json.String)
	if !str_ok {
		return "", false
	}

	return string(table_name_str), true
}
|
|
|
|
|
|
|
|
|
|
// Parse Item field from request body
|
|
|
|
|
// Returns owned Item
|
|
|
|
|
parse_item_from_request :: proc(request_body: []byte) -> (Item, bool) {
	body_val, err := json.parse(request_body, allocator = context.temp_allocator)
	if err != nil do return {}, false
	defer json.destroy_value(body_val)

	root, is_obj := body_val.(json.Object)
	if !is_obj do return {}, false

	item_val, has_item := root["Item"]
	if !has_item do return {}, false

	// parse_item_from_value clones everything it keeps, so destroying the
	// temp-allocated tree on return is safe.
	return parse_item_from_value(item_val)
}
|
|
|
|
|
|
|
|
|
|
// Parse Key field from request body
|
|
|
|
|
// Returns owned Item representing the key
|
|
|
|
|
parse_key_from_request :: proc(request_body: []byte) -> (Item, bool) {
	body_val, err := json.parse(request_body, allocator = context.temp_allocator)
	if err != nil do return {}, false
	defer json.destroy_value(body_val)

	root, is_obj := body_val.(json.Object)
	if !is_obj do return {}, false

	key_val, has_key := root["Key"]
	if !has_key do return {}, false

	// parse_item_from_value clones everything it keeps, so destroying the
	// temp-allocated tree on return is safe.
	return parse_item_from_value(key_val)
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Pagination Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Parse Limit from request body
|
|
|
|
|
// Returns 0 if not present
|
|
|
|
|
parse_limit :: proc(request_body: []byte) -> int {
	data, err := json.parse(request_body, allocator = context.temp_allocator)
	if err != nil do return 0
	defer json.destroy_value(data)

	root, is_obj := data.(json.Object)
	if !is_obj do return 0

	limit_val, has_limit := root["Limit"]
	if !has_limit do return 0

	// JSON numbers may decode as either Integer or Float; anything else → 0
	if i, is_int := limit_val.(json.Integer); is_int do return int(i)
	if f, is_float := limit_val.(json.Float); is_float do return int(f)
	return 0
}
|
|
|
|
|
|
2026-02-15 20:57:16 -05:00
|
|
|
// ============================================================================
|
|
|
|
|
// ExclusiveStartKey Parsing (Pagination Input)
|
|
|
|
|
//
|
|
|
|
|
// Parse ExclusiveStartKey from request body. Requires key_schema so we can
|
|
|
|
|
// validate and extract the key, then convert it to a binary storage key.
|
|
|
|
|
// Returns the binary key bytes that can be passed straight to scan/query.
|
|
|
|
|
// Returns nil (not an error) when the field is absent.
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
parse_exclusive_start_key :: proc(
	request_body: []byte,
	table_name: string,
	key_schema: []Key_Schema_Element,
) -> (result: Maybe([]byte), ok: bool) {
	// Absence of an ESK is never an error here; request-body validation
	// for other fields is assumed to happen elsewhere.
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil, true // no ESK is fine
	}
	defer json.destroy_value(data)

	root, root_ok := data.(json.Object)
	if !root_ok {
		return nil, true
	}

	esk_val, found := root["ExclusiveStartKey"]
	if !found {
		return nil, true // absent → no pagination, that's ok
	}

	// Parse ExclusiveStartKey as a DynamoDB Item (owned; destroyed below)
	key_item, item_ok := parse_item_from_value(esk_val)
	if !item_ok {
		return nil, false // present but malformed → real error
	}
	defer item_destroy(&key_item)

	// Validate and extract key struct using schema
	key_struct, key_ok := key_from_item(key_item, key_schema)
	if !key_ok {
		return nil, false // missing required key attributes
	}
	defer key_destroy(&key_struct)

	// Get raw byte values (pk, and sk when the schema has one)
	key_values, kv_ok := key_get_values(&key_struct)
	if !kv_ok {
		return nil, false
	}

	// Build binary storage key suitable for passing straight to scan/query.
	// NOTE(review): presumably build_data_key allocates fresh bytes the caller
	// owns (key_struct is destroyed on return) — confirm its allocation contract.
	binary_key := build_data_key(table_name, key_values.pk, key_values.sk)

	result = binary_key
	ok = true
	return
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// LastEvaluatedKey Generation (Pagination Output)
|
|
|
|
|
//
|
|
|
|
|
// Decode a binary storage key back into a DynamoDB JSON fragment suitable
|
|
|
|
|
// for the "LastEvaluatedKey" field in scan/query responses.
|
|
|
|
|
//
|
|
|
|
|
// Steps:
|
|
|
|
|
// 1. Decode the binary key → table_name, pk_bytes, sk_bytes
|
|
|
|
|
// 2. Look up attribute types from metadata (S/N/B)
|
|
|
|
|
// 3. Build a Key struct with correctly-typed AttributeValues
|
|
|
|
|
// 4. Convert Key → Item → DynamoDB JSON string
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Build a Key struct from a binary storage key using metadata for type info.
|
|
|
|
|
// This mirrors the Zig buildKeyFromBinaryWithTypes helper.
|
|
|
|
|
build_key_from_binary_with_types :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (key: Key, ok: bool) {
	// Walk the binary key layout in order:
	// entity-type byte, table-name segment, pk segment, optional sk segment.
	// Any decode failure propagates via or_return as (zero Key, false).
	decoder := Key_Decoder{data = binary_key, pos = 0}

	// Skip entity type byte
	_ = decoder_read_entity_type(&decoder) or_return

	// Skip table name segment
	_ = decoder_read_segment_borrowed(&decoder) or_return

	// Read partition key bytes (borrowed: aliases binary_key)
	pk_bytes := decoder_read_segment_borrowed(&decoder) or_return

	// Read sort key bytes if present
	sk_bytes: Maybe([]byte) = nil
	if decoder_has_more(&decoder) {
		sk := decoder_read_segment_borrowed(&decoder) or_return
		sk_bytes = sk
	}

	// Get PK attribute type from metadata (S/N/B), keyed by attribute name
	pk_name := table_metadata_get_partition_key_name(metadata).? or_return
	pk_type := table_metadata_get_attribute_type(metadata, pk_name).? or_return

	// NOTE(review): pk_bytes borrows from binary_key — presumably
	// build_attribute_value_with_type clones into owned memory; confirm,
	// since callers key_destroy() the result.
	pk_attr := build_attribute_value_with_type(pk_bytes, pk_type)

	// Build SK attribute if present
	sk_attr: Maybe(Attribute_Value) = nil
	if sk, has_sk := sk_bytes.?; has_sk {
		sk_name := table_metadata_get_sort_key_name(metadata).? or_return
		sk_type := table_metadata_get_attribute_type(metadata, sk_name).? or_return
		sk_attr = build_attribute_value_with_type(sk, sk_type)
	}

	return Key{pk = pk_attr, sk = sk_attr}, true
}
|
|
|
|
|
|
2026-02-15 20:57:16 -05:00
|
|
|
// Serialize a binary storage key as a LastEvaluatedKey JSON fragment.
|
|
|
|
|
// Returns a string like: {"pk":{"S":"val"},"sk":{"N":"42"}}
|
|
|
|
|
serialize_last_evaluated_key :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (result: string, ok: bool) {
	// Reconstruct a typed Key from the binary storage key
	key, built_ok := build_key_from_binary_with_types(binary_key, metadata)
	if !built_ok do return "", false
	defer key_destroy(&key)

	// Key → Item → canonical DynamoDB JSON string
	item := key_to_item(key, metadata.key_schema)
	defer item_destroy(&item)

	result = serialize_item(item)
	ok = true
	return
}
|