2026-02-15 12:13:46 -05:00
|
|
|
// DynamoDB JSON parsing and serialization
|
|
|
|
|
// Pure functions for converting between DynamoDB JSON format and internal types
|
|
|
|
|
package dynamodb
|
|
|
|
|
|
|
|
|
|
import "core:encoding/json"
|
|
|
|
|
import "core:fmt"
|
|
|
|
|
import "core:slice"
|
|
|
|
|
import "core:strings"
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Parsing (JSON → Types)
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Decode raw DynamoDB-formatted JSON bytes into an Item.
// The caller owns the returned Item (all keys/values are deep-cloned by
// parse_item_from_value) and is responsible for freeing it.
parse_item :: proc(json_bytes: []byte) -> (Item, bool) {
	parsed, err := json.parse(json_bytes, allocator = context.allocator)
	if err != nil {
		return {}, false
	}
	// The parsed JSON tree is only a staging structure; it is released here
	// once its contents have been cloned into the Item.
	defer json.destroy_value(parsed)

	return parse_item_from_value(parsed)
}
|
|
|
|
|
|
|
|
|
|
// Parse an Item from an already-parsed JSON Value
// More efficient when you already have a Value (e.g., from request body parsing)
//
// The input must be a JSON object; each entry becomes one attribute. Keys and
// attribute values are deep-cloned, so the returned Item is independent of
// `value`'s lifetime and is owned by the caller. On any attribute failure,
// everything allocated so far is released and {}, false is returned.
parse_item_from_value :: proc(value: json.Value) -> (Item, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return {}, false
	}

	item := make(Item)

	for key, val in obj {
		// Clone the key up front; on success it is owned by the Item.
		attr_name := strings.clone(key)

		attr_value, attr_ok := parse_attribute_value(val)
		if !attr_ok {
			// Cleanup on error: free every key/value inserted so far, then
			// the map itself, then the just-cloned (not yet inserted) key.
			for k, v in item {
				delete(k)
				// Map iteration yields a value; copy it so we can take its
				// address for attr_value_destroy.
				v_copy := v
				attr_value_destroy(&v_copy)
			}
			delete(item)
			delete(attr_name)
			return {}, false
		}

		item[attr_name] = attr_value
	}

	return item, true
}
|
|
|
|
|
|
|
|
|
|
// Parse a single DynamoDB AttributeValue from JSON
// Format: {"S": "value"}, {"N": "123"}, {"M": {...}}, etc.
//
// Returns an owned Attribute_Value (all strings and containers are cloned)
// and true on success, or nil and false on malformed input. On failure,
// everything allocated so far for this value is released.
parse_attribute_value :: proc(value: json.Value) -> (Attribute_Value, bool) {
	// Clone a JSON array of strings into an owned []string.
	// Shared by the SS, NS, and BS cases, which differ only in the distinct
	// type wrapped around the resulting slice. Frees partial results on error.
	clone_string_array :: proc(arr: json.Array) -> ([]string, bool) {
		out := make([]string, len(arr))
		for elem, i in arr {
			str, str_ok := elem.(json.String)
			if !str_ok {
				// Cleanup on error: free the elements cloned so far.
				for j in 0 ..< i {
					delete(out[j])
				}
				delete(out)
				return nil, false
			}
			out[i] = strings.clone(string(str))
		}
		return out, true
	}

	obj, ok := value.(json.Object)
	if !ok {
		return nil, false
	}

	// DynamoDB attribute must have exactly one key (the type indicator)
	if len(obj) != 1 {
		return nil, false
	}

	// Get the single key-value pair
	for type_name, type_value in obj {
		switch type_name {
		case "S": // String
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return String(strings.clone(string(str))), true

		case "N": // Number (stored as string)
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Number(strings.clone(string(str))), true

		case "B": // Binary (base64 string)
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Binary(strings.clone(string(str))), true

		case "BOOL": // Boolean
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Bool(b), true

		case "NULL": // Null (payload is a boolean; preserved as-is)
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Null(b), true

		case "SS": // String Set
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}
			elems, elems_ok := clone_string_array(arr)
			if !elems_ok {
				return nil, false
			}
			return String_Set(elems), true

		case "NS": // Number Set
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}
			elems, elems_ok := clone_string_array(arr)
			if !elems_ok {
				return nil, false
			}
			return Number_Set(elems), true

		case "BS": // Binary Set
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}
			elems, elems_ok := clone_string_array(arr)
			if !elems_ok {
				return nil, false
			}
			return Binary_Set(elems), true

		case "L": // List (elements parsed recursively)
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}
			list := make([]Attribute_Value, len(arr))
			for elem, i in arr {
				val, val_ok := parse_attribute_value(elem)
				if !val_ok {
					// Cleanup on error: destroy elements parsed so far.
					for j in 0 ..< i {
						item_copy := list[j]
						attr_value_destroy(&item_copy)
					}
					delete(list)
					return nil, false
				}
				list[i] = val
			}
			return List(list), true

		case "M": // Map (keys cloned, values parsed recursively)
			map_obj, map_ok := type_value.(json.Object)
			if !map_ok {
				return nil, false
			}
			attr_map := make(map[string]Attribute_Value)
			for map_key, map_val in map_obj {
				key := strings.clone(map_key)

				val, val_ok := parse_attribute_value(map_val)
				if !val_ok {
					// Cleanup on error: free the pending key plus every
					// entry inserted so far, then the map itself.
					delete(key)
					for k, v in attr_map {
						delete(k)
						v_copy := v
						attr_value_destroy(&v_copy)
					}
					delete(attr_map)
					return nil, false
				}

				attr_map[key] = val
			}
			return Map(attr_map), true
		}
	}

	// Unknown type indicator.
	return nil, false
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Serialization (Types → JSON)
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Serialize an Item to canonical DynamoDB JSON format.
// Keys are sorted alphabetically for deterministic output.
// The returned string is cloned with context.allocator; the caller frees it.
serialize_item :: proc(item: Item) -> string {
	sb := strings.builder_make()
	defer strings.builder_destroy(&sb)

	serialize_item_to_builder(&sb, item)

	// Clone out of the builder because its backing memory is destroyed on
	// return from this procedure.
	result := strings.clone(strings.to_string(sb))
	return result
}
|
|
|
|
|
|
|
|
|
|
// Serialize an Item to a strings.Builder with deterministic ordering
// Keys are sorted alphabetically. Keys are emitted as escaped JSON string
// literals: previously they were written with a raw `"%s"` format, so a key
// containing '"', '\' or a control character produced malformed JSON.
serialize_item_to_builder :: proc(b: ^strings.Builder, item: Item) {
	// Emit s as a valid JSON string literal, escaping the characters that
	// would otherwise break the surrounding document.
	write_json_key :: proc(b: ^strings.Builder, s: string) {
		strings.write_string(b, "\"")
		for r in s {
			switch r {
			case '"':
				strings.write_string(b, "\\\"")
			case '\\':
				strings.write_string(b, "\\\\")
			case '\n':
				strings.write_string(b, "\\n")
			case '\r':
				strings.write_string(b, "\\r")
			case '\t':
				strings.write_string(b, "\\t")
			case:
				if r < 0x20 {
					// Remaining control characters require \u escapes.
					fmt.sbprintf(b, "\\u%04x", int(r))
				} else {
					strings.write_rune(b, r)
				}
			}
		}
		strings.write_string(b, "\"")
	}

	// Collect and sort keys for deterministic output
	keys := make([dynamic]string, context.temp_allocator)
	defer delete(keys)

	for key in item {
		append(&keys, key)
	}

	// Sort keys alphabetically
	slice.sort_by(keys[:], proc(a, b: string) -> bool {
		return a < b
	})

	strings.write_string(b, "{")
	for key, i in keys {
		if i > 0 {
			strings.write_string(b, ",")
		}
		write_json_key(b, key)
		strings.write_string(b, ":")
		value := item[key]
		serialize_attribute_value(b, value)
	}
	strings.write_string(b, "}")
}
|
|
|
|
|
|
|
|
|
|
// Serialize an AttributeValue to DynamoDB JSON format
// All embedded strings (S/N/B payloads, set members, map keys) are emitted
// as escaped JSON string literals: previously they were written with a raw
// `"%s"` format, so a value containing '"', '\' or a control character
// produced malformed JSON. Output is unchanged for strings that need no
// escaping. Map keys are sorted alphabetically for deterministic output.
serialize_attribute_value :: proc(b: ^strings.Builder, attr: Attribute_Value) {
	// Emit s as a valid JSON string literal with the mandatory escapes.
	write_json_string :: proc(b: ^strings.Builder, s: string) {
		strings.write_string(b, "\"")
		for r in s {
			switch r {
			case '"':
				strings.write_string(b, "\\\"")
			case '\\':
				strings.write_string(b, "\\\\")
			case '\n':
				strings.write_string(b, "\\n")
			case '\r':
				strings.write_string(b, "\\r")
			case '\t':
				strings.write_string(b, "\\t")
			case:
				if r < 0x20 {
					// Remaining control characters require \u escapes.
					fmt.sbprintf(b, "\\u%04x", int(r))
				} else {
					strings.write_rune(b, r)
				}
			}
		}
		strings.write_string(b, "\"")
	}

	switch v in attr {
	case String:
		strings.write_string(b, `{"S":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Number:
		strings.write_string(b, `{"N":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Binary:
		strings.write_string(b, `{"B":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Bool:
		fmt.sbprintf(b, `{"BOOL":%v}`, bool(v))

	case Null:
		// DynamoDB always serializes NULL as true.
		strings.write_string(b, `{"NULL":true}`)

	case String_Set:
		strings.write_string(b, `{"SS":[`)
		for s, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, s)
		}
		strings.write_string(b, "]}")

	case Number_Set:
		strings.write_string(b, `{"NS":[`)
		for n, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, n)
		}
		strings.write_string(b, "]}")

	case Binary_Set:
		strings.write_string(b, `{"BS":[`)
		for bin, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, bin)
		}
		strings.write_string(b, "]}")

	case List:
		strings.write_string(b, `{"L":[`)
		for item, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			// Elements serialize recursively.
			serialize_attribute_value(b, item)
		}
		strings.write_string(b, "]}")

	case Map:
		strings.write_string(b, `{"M":{`)

		// Collect and sort keys for deterministic output
		keys := make([dynamic]string, context.temp_allocator)
		for key in v {
			append(&keys, key)
		}

		slice.sort_by(keys[:], proc(a, b: string) -> bool {
			return a < b
		})

		for key, i in keys {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, key)
			strings.write_string(b, ":")
			value := v[key]
			serialize_attribute_value(b, value)
		}

		strings.write_string(b, "}}")
	}
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Request Parsing Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Extract table name from request body
//
// The returned string references memory owned by the temp allocator and is
// valid only until the temp allocator is reset; clone it if it must outlive
// the current request. Returns "", false on malformed JSON, a non-object
// root, a missing TableName, or a non-string TableName.
parse_table_name :: proc(request_body: []byte) -> (string, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "", false
	}
	// Destroy with the same allocator the value was parsed with. The previous
	// default (context.allocator) handed temp-arena pointers to the wrong
	// allocator's free.
	defer json.destroy_value(data, context.temp_allocator)

	root, ok := data.(json.Object)
	if !ok {
		return "", false
	}

	table_name_val, found := root["TableName"]
	if !found {
		return "", false
	}

	table_name_str, str_ok := table_name_val.(json.String)
	if !str_ok {
		return "", false
	}

	return string(table_name_str), true
}
|
|
|
|
|
|
|
|
|
|
// Parse Item field from request body
// Returns owned Item
//
// The request JSON is staged in temp memory; parse_item_from_value deep-clones
// keys and values, so the returned Item is owned by the caller and independent
// of the temp allocator. Returns {}, false on malformed JSON, a non-object
// root, or a missing/invalid Item field.
parse_item_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	// Destroy with the same allocator the value was parsed with. The previous
	// default (context.allocator) handed temp-arena pointers to the wrong
	// allocator's free.
	defer json.destroy_value(data, context.temp_allocator)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	item_val, found := root["Item"]
	if !found {
		return {}, false
	}

	return parse_item_from_value(item_val)
}
|
|
|
|
|
|
|
|
|
|
// Parse Key field from request body
// Returns owned Item representing the key
//
// The request JSON is staged in temp memory; parse_item_from_value deep-clones
// keys and values, so the returned Item is owned by the caller. Returns
// {}, false on malformed JSON, a non-object root, or a missing/invalid Key.
parse_key_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	// Destroy with the same allocator the value was parsed with. The previous
	// default (context.allocator) handed temp-arena pointers to the wrong
	// allocator's free.
	defer json.destroy_value(data, context.temp_allocator)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	key_val, found := root["Key"]
	if !found {
		return {}, false
	}

	return parse_item_from_value(key_val)
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Pagination Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
// Parse Limit from request body
// Returns 0 if not present
// Also returns 0 on malformed JSON, a non-object root, or a non-numeric Limit.
parse_limit :: proc(request_body: []byte) -> int {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return 0
	}
	// Destroy with the same allocator the value was parsed with. The previous
	// default (context.allocator) handed temp-arena pointers to the wrong
	// allocator's free.
	defer json.destroy_value(data, context.temp_allocator)

	root, ok := data.(json.Object)
	if !ok {
		return 0
	}

	limit_val, found := root["Limit"]
	if !found {
		return 0
	}

	// JSON numbers can be either Integer or Float
	#partial switch v in limit_val {
	case json.Integer:
		return int(v)
	case json.Float:
		// Fractional limits are truncated toward zero.
		return int(v)
	}

	return 0
}
|
|
|
|
|
|
|
|
|
|
// Parse ExclusiveStartKey from request body as binary key bytes
// Returns nil if not present
//
// NOTE(review): this is currently a validation-only placeholder — it checks
// that ExclusiveStartKey parses as an Item and then ALWAYS returns nil; the
// Item-to-binary-key conversion is deferred to the storage layer (see TODO
// at the bottom).
parse_exclusive_start_key :: proc(request_body: []byte) -> Maybe([]byte) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil
	}
	// Destroy with the same allocator the value was parsed with. The previous
	// default (context.allocator) handed temp-arena pointers to the wrong
	// allocator's free.
	defer json.destroy_value(data, context.temp_allocator)

	root, ok := data.(json.Object)
	if !ok {
		return nil
	}

	key_val, found := root["ExclusiveStartKey"]
	if !found {
		return nil
	}

	// Parse as Item first
	key_item, item_ok := parse_item_from_value(key_val)
	if !item_ok {
		return nil
	}
	defer item_destroy(&key_item)

	// TODO: Convert to binary key bytes (this will be done by the storage
	// layer). For now, just return nil - the storage layer will handle the
	// conversion.
	return nil
}
|
|
|
|
|
|
|
|
|
|
// Serialize a Key as ExclusiveStartKey for response
//
// Converts the binary Key back into an Item via key_to_item, serializes it to
// canonical DynamoDB JSON, and releases the intermediate Item (item_destroy)
// before returning. Caller owns the returned string (see serialize_item).
// NOTE(review): assumes key_to_item accepts an empty key_schema for pure
// serialization — confirm against its implementation.
serialize_last_evaluated_key :: proc(key: Key) -> string {
	item := key_to_item(key, {}) // Empty key_schema since we don't need validation here
	defer item_destroy(&item)

	return serialize_item(item)
}
|