Files

821 lines
20 KiB
Odin
Raw Permalink Normal View History

2026-02-15 12:13:46 -05:00
// DynamoDB JSON parsing and serialization
// Pure functions for converting between DynamoDB JSON format and internal types
package dynamodb
import "core:encoding/json"
import "core:fmt"
import "core:slice"
import "core:strings"
// ============================================================================
// Parsing (JSON → Types)
// ============================================================================
// Parse DynamoDB JSON format into an Item.
// Returns (item, true) on success; caller owns the returned Item.
// Returns ({}, false) when the bytes are not valid JSON or not an object.
parse_item :: proc(json_bytes: []byte) -> (Item, bool) {
	data, parse_err := json.parse(json_bytes, allocator = context.allocator)
	if parse_err != nil {
		return {}, false
	}
	// Safe to destroy the JSON tree here: parse_item_from_value clones
	// every string it keeps, so the Item does not alias `data`.
	defer json.destroy_value(data)

	return parse_item_from_value(data)
}
// Parse an Item from an already-parsed JSON Value.
// More efficient than parse_item when you already have a Value
// (e.g., from request body parsing).
// Keys and attribute values are deep-cloned, so the returned Item
// does not alias `value`; caller owns it.
parse_item_from_value :: proc(value: json.Value) -> (Item, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return {}, false
	}

	item := make(Item)

	for key, val in obj {
		attr_name := strings.clone(key)

		attr_value, attr_ok := parse_attribute_value(val)
		if !attr_ok {
			// Roll back everything inserted so far before failing,
			// so a partial parse never leaks.
			for k, v in item {
				delete(k)
				v_copy := v
				attr_value_destroy(&v_copy)
			}
			delete(item)
			delete(attr_name)
			return {}, false
		}

		item[attr_name] = attr_value
	}

	return item, true
}
// Parse a single DynamoDB AttributeValue from JSON.
// Format: {"S": "value"}, {"N": "123"}, {"M": {...}}, etc.
// The attribute object must contain exactly one key (the type indicator);
// anything else — or an unknown indicator — returns (nil, false).
// All returned data is owned by the caller (strings are cloned, sets and
// lists are freshly allocated).
parse_attribute_value :: proc(value: json.Value) -> (Attribute_Value, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return nil, false
	}

	// DynamoDB attribute must have exactly one key (the type indicator)
	if len(obj) != 1 {
		return nil, false
	}

	// Get the single key-value pair (the loop runs at most once).
	for type_name, type_value in obj {
		switch type_name {
		case "S":
			// String
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return String(strings.clone(string(str))), true

		case "N":
			// Number — transported as a string, parsed into DDB_Number
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			ddb_num, num_ok := parse_ddb_number(string(str))
			if !num_ok {
				return nil, false
			}
			// Clone: the parsed fields are slices into the input string.
			return clone_ddb_number(ddb_num), true

		case "B":
			// Binary — kept in its base64 string form
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Binary(strings.clone(string(str))), true

		case "BOOL":
			// Boolean
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Bool(b), true

		case "NULL":
			// Null — carries a boolean payload (DynamoDB sends {"NULL":true})
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Null(b), true

		case "SS":
			// String Set
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			strings_arr := make([]string, len(arr))
			for elem, i in arr {
				str, str_ok := elem.(json.String)
				if !str_ok {
					// Free the elements cloned so far, then the slice.
					for j in 0 ..< i {
						delete(strings_arr[j])
					}
					delete(strings_arr)
					return nil, false
				}
				strings_arr[i] = strings.clone(string(str))
			}
			return String_Set(strings_arr), true

		case "NS":
			// Number Set
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			numbers_arr := make([]DDB_Number, len(arr))
			for elem, i in arr {
				// An element fails if it is not a string or does not parse
				// as a DynamoDB number; both paths share one cleanup.
				ddb_num: DDB_Number
				num_ok := false
				if str, str_ok := elem.(json.String); str_ok {
					ddb_num, num_ok = parse_ddb_number(string(str))
				}
				if !num_ok {
					for j in 0 ..< i {
						delete(numbers_arr[j].integer_part)
						delete(numbers_arr[j].fractional_part)
					}
					delete(numbers_arr)
					return nil, false
				}
				// Clone and store — parsed fields alias the input string.
				numbers_arr[i] = clone_ddb_number(ddb_num)
			}
			return DDB_Number_Set(numbers_arr), true

		case "BS":
			// Binary Set (elements kept in base64 string form)
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			binaries_arr := make([]string, len(arr))
			for elem, i in arr {
				str, str_ok := elem.(json.String)
				if !str_ok {
					for j in 0 ..< i {
						delete(binaries_arr[j])
					}
					delete(binaries_arr)
					return nil, false
				}
				binaries_arr[i] = strings.clone(string(str))
			}
			return Binary_Set(binaries_arr), true

		case "L":
			// List — elements parsed recursively
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			list := make([]Attribute_Value, len(arr))
			for elem, i in arr {
				val, val_ok := parse_attribute_value(elem)
				if !val_ok {
					// Destroy the values built so far, then the slice.
					for j in 0 ..< i {
						item_copy := list[j]
						attr_value_destroy(&item_copy)
					}
					delete(list)
					return nil, false
				}
				list[i] = val
			}
			return List(list), true

		case "M":
			// Map — keys cloned, values parsed recursively
			map_obj, map_ok := type_value.(json.Object)
			if !map_ok {
				return nil, false
			}

			attr_map := make(map[string]Attribute_Value)
			for map_key, map_val in map_obj {
				key := strings.clone(map_key)

				val, val_ok := parse_attribute_value(map_val)
				if !val_ok {
					// Free the pending key plus everything already inserted.
					delete(key)
					for k, v in attr_map {
						delete(k)
						v_copy := v
						attr_value_destroy(&v_copy)
					}
					delete(attr_map)
					return nil, false
				}

				attr_map[key] = val
			}
			return Map(attr_map), true
		}
	}

	// Unknown type indicator.
	return nil, false
}
// ============================================================================
// Serialization (Types → JSON)
// ============================================================================
// Serialize an Item to canonical DynamoDB JSON format.
// Keys are sorted alphabetically for deterministic output.
// Caller owns the returned string.
serialize_item :: proc(item: Item) -> string {
	builder := strings.builder_make()
	defer strings.builder_destroy(&builder)

	serialize_item_to_builder(&builder, item)

	// Clone: the builder's backing memory is destroyed on return.
	return strings.clone(strings.to_string(builder))
}
// Serialize an Item to a strings.Builder with deterministic ordering
// (map iteration order is randomized, so keys are collected and sorted first).
// NOTE(review): attribute names are written verbatim — a key containing `"` or
// `\` would produce invalid JSON. Confirm upstream validation rules this out.
serialize_item_to_builder :: proc(b: ^strings.Builder, item: Item) {
	// Collect and sort keys for deterministic output
	keys := make([dynamic]string, context.temp_allocator)
	defer delete(keys)

	for key in item {
		append(&keys, key)
	}

	// Sort keys alphabetically
	slice.sort_by(keys[:], proc(a, b: string) -> bool {
		return a < b
	})

	strings.write_string(b, "{")
	for key, i in keys {
		if i > 0 {
			strings.write_string(b, ",")
		}
		fmt.sbprintf(b, `"%s":`, key)
		value := item[key]
		serialize_attribute_value(b, value)
	}
	strings.write_string(b, "}")
}
// Serialize an AttributeValue to DynamoDB JSON format, e.g. {"S":"value"}.
// Lists preserve element order; Map keys are sorted for deterministic output.
// NOTE(review): string/binary payloads are written without JSON escaping —
// confirm values are validated or pre-escaped upstream.
serialize_attribute_value :: proc(b: ^strings.Builder, attr: Attribute_Value) {
	switch v in attr {
	case String:
		strings.write_string(b, `{"S":"`)
		strings.write_string(b, string(v))
		strings.write_string(b, `"}`)

	case DDB_Number:
		// Numbers are transported as strings in DynamoDB JSON.
		num_str := format_ddb_number(v)
		strings.write_string(b, `{"N":"`)
		strings.write_string(b, num_str)
		strings.write_string(b, `"}`)

	case Binary:
		// Binary is stored in its base64 string form, so write as-is.
		strings.write_string(b, `{"B":"`)
		strings.write_string(b, string(v))
		strings.write_string(b, `"}`)

	case Bool:
		strings.write_string(b, `{"BOOL":`)
		if bool(v) { strings.write_string(b, "true") } else { strings.write_string(b, "false") }
		strings.write_string(b, "}")

	case Null:
		// DynamoDB always emits {"NULL":true} regardless of stored payload.
		strings.write_string(b, `{"NULL":true}`)

	case String_Set:
		strings.write_string(b, `{"SS":[`)
		for s, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s"`, s)
		}
		strings.write_string(b, "]}")

	case DDB_Number_Set:
		strings.write_string(b, `{"NS":[`)
		for num, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			num_str := format_ddb_number(num)
			fmt.sbprintf(b, `"%s"`, num_str)
		}
		strings.write_string(b, "]}")

	case Binary_Set:
		strings.write_string(b, `{"BS":[`)
		for bin, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s"`, bin)
		}
		strings.write_string(b, "]}")

	case List:
		// Elements serialize recursively, preserving order.
		strings.write_string(b, `{"L":[`)
		for item, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			serialize_attribute_value(b, item)
		}
		strings.write_string(b, "]}")

	case Map:
		strings.write_string(b, `{"M":{`)

		// Collect and sort keys for deterministic output
		keys := make([dynamic]string, context.temp_allocator)
		for key in v {
			append(&keys, key)
		}

		slice.sort_by(keys[:], proc(a, b: string) -> bool {
			return a < b
		})

		for key, i in keys {
			if i > 0 {
				strings.write_string(b, ",")
			}
			fmt.sbprintf(b, `"%s":`, key)
			value := v[key]
			serialize_attribute_value(b, value)
		}

		strings.write_string(b, "}}")
	}
}
// ============================================================================
// Request Parsing Helpers
// ============================================================================
// Extract the "TableName" field from a request body.
// Returns (name, true) with a cloned string the caller owns, or ("", false)
// when the body is not a JSON object or the field is absent / not a string.
parse_table_name :: proc(request_body: []byte) -> (string, bool) {
	// Transient JSON tree lives in the temp allocator.
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "", false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return "", false
	}

	table_name_val, found := root["TableName"]
	if !found {
		return "", false
	}

	table_name_str, str_ok := table_name_val.(json.String)
	if !str_ok {
		return "", false
	}

	// Clone: the JSON tree (and its strings) is destroyed on return.
	return strings.clone(string(table_name_str)), true
}
// Parse the "Item" field from a request body.
// Returns an owned Item (deep-cloned from the JSON tree); ({}, false) when the
// body is invalid, not an object, or lacks an "Item" field.
parse_item_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	item_val, found := root["Item"]
	if !found {
		return {}, false
	}

	// parse_item_from_value clones everything it keeps, so destroying the
	// temp JSON tree afterwards is safe.
	return parse_item_from_value(item_val)
}
// Parse the "Key" field from a request body.
// Returns an owned Item representing the key; ({}, false) when the body is
// invalid, not an object, or lacks a "Key" field.
parse_key_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	key_val, found := root["Key"]
	if !found {
		return {}, false
	}

	// parse_item_from_value clones everything it keeps, so destroying the
	// temp JSON tree afterwards is safe.
	return parse_item_from_value(key_val)
}
// ============================================================================
// Pagination Helpers
// ============================================================================
// Parse the "Limit" field from a request body.
// Returns 0 when the body is invalid, the field is absent, or it is not a
// JSON number (callers treat 0 as "no limit").
parse_limit :: proc(request_body: []byte) -> int {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return 0
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return 0
	}

	limit_val, found := root["Limit"]
	if !found {
		return 0
	}

	// JSON numbers can be either Integer or Float depending on how they
	// were written; accept both (floats are truncated).
	#partial switch v in limit_val {
	case json.Integer:
		return int(v)
	case json.Float:
		return int(v)
	}

	return 0
}
2026-02-15 20:57:16 -05:00
// ============================================================================
// ExclusiveStartKey Parsing (Pagination Input)
//
// Parse ExclusiveStartKey from request body. Requires key_schema so we can
// validate and extract the key, then convert it to a binary storage key.
// Returns the binary key bytes that can be passed straight to scan/query.
// Returns nil (not an error) when the field is absent.
// ============================================================================
2026-02-21 20:50:14 -05:00
// Returns (key, ok, body_parse_err).
// ok=true,  body_parse_err=false → key present and valid, or key absent (no pagination)
// ok=false, body_parse_err=true  → request body is not valid JSON or not an object
// ok=false, body_parse_err=false → ExclusiveStartKey present but malformed/invalid
parse_exclusive_start_key :: proc(
	request_body: []byte,
	table_name: string,
	key_schema: []Key_Schema_Element,
) -> (result: Maybe([]byte), ok: bool, body_err: bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil, false, true // body is not valid JSON — real error
	}
	defer json.destroy_value(data)

	root, root_ok := data.(json.Object)
	if !root_ok {
		return nil, false, true // root must be an object — real error
	}

	esk_val, found := root["ExclusiveStartKey"]
	if !found {
		return nil, true, false // absent → no pagination, that's ok
	}

	// Parse ExclusiveStartKey as a DynamoDB Item
	key_item, item_ok := parse_item_from_value(esk_val)
	if !item_ok {
		return nil, false, false // present but malformed → validation error
	}
	defer item_destroy(&key_item)

	// Validate and extract key struct using schema
	key_struct, key_ok := key_from_item(key_item, key_schema)
	if !key_ok {
		return nil, false, false // missing required key attributes
	}
	defer key_destroy(&key_struct)

	// Get raw byte values
	key_values, kv_ok := key_get_values(&key_struct)
	if !kv_ok {
		return nil, false, false
	}

	// Build binary storage key that can be passed straight to scan/query.
	binary_key := build_data_key(table_name, key_values.pk, key_values.sk)
	result = binary_key
	ok = true
	return
}
2026-02-17 12:36:38 -05:00
// parse_exclusive_start_key_gsi — GSI counterpart of parse_exclusive_start_key.
// Returns (key, ok, body_parse_err) — same contract as parse_exclusive_start_key.
// The binary GSI key encodes both the index key and the base-table primary key,
// so the ExclusiveStartKey item must contain attributes for both schemas.
parse_exclusive_start_key_gsi :: proc(
	request_body: []byte,
	table_name: string,
	metadata: ^Table_Metadata,
	gsi: ^Global_Secondary_Index,
) -> (Maybe([]byte), bool, bool) {
	// Parse into the temp allocator, matching parse_exclusive_start_key
	// (previously this used the default allocator, inconsistently).
	root, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil do return nil, false, true // body not valid JSON
	defer json.destroy_value(root)

	obj, obj_ok := root.(json.Object)
	if !obj_ok do return nil, false, true // root must be an object

	esk_val, has := obj["ExclusiveStartKey"]
	if !has do return nil, true, false // absent → no pagination

	key_item, key_ok := parse_item_from_value(esk_val)
	if !key_ok do return nil, false, false
	defer item_destroy(&key_item)

	// Index key attributes (validated against the GSI schema)
	idx_key, idx_ok := key_from_item(key_item, gsi.key_schema)
	if !idx_ok do return nil, false, false
	defer key_destroy(&idx_key)

	idx_vals, idx_vals_ok := key_get_values(&idx_key)
	if !idx_vals_ok do return nil, false, false

	// Base-table key attributes (validated against the table schema)
	base_key, base_ok := key_from_item(key_item, metadata.key_schema)
	if !base_ok do return nil, false, false
	defer key_destroy(&base_key)

	base_vals, base_vals_ok := key_get_values(&base_key)
	if !base_vals_ok do return nil, false, false

	k := build_gsi_key(
		table_name,
		gsi.index_name,
		idx_vals.pk,
		idx_vals.sk,
		base_vals.pk,
		base_vals.sk,
	)
	return k, true, false
}
2026-02-15 20:57:16 -05:00
// ============================================================================
// LastEvaluatedKey Generation (Pagination Output)
//
// Decode a binary storage key back into a DynamoDB JSON fragment suitable
// for the "LastEvaluatedKey" field in scan/query responses.
//
// Steps:
// 1. Decode the binary key → table_name, pk_bytes, sk_bytes
// 2. Look up attribute types from metadata (S/N/B)
// 3. Build a Key struct with correctly-typed AttributeValues
// 4. Convert Key → Item → DynamoDB JSON string
// ============================================================================
// Build a Key struct from a binary storage key using metadata for type info.
// This mirrors the Zig buildKeyFromBinaryWithTypes helper.
// Returns ({}, false) when the key cannot be decoded or metadata lacks the
// needed attribute names/types.
// NOTE(review): if the SK name/type lookup fails after pk_attr is built,
// pk_attr is not destroyed before or_return — possible leak; confirm
// build_attribute_value_with_type ownership semantics.
build_key_from_binary_with_types :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (key: Key, ok: bool) {
	decoder := Key_Decoder{data = binary_key, pos = 0}

	// Skip entity type byte
	_ = decoder_read_entity_type(&decoder) or_return
	// Skip table name segment
	_ = decoder_read_segment_borrowed(&decoder) or_return

	// Read partition key bytes (borrowed from binary_key)
	pk_bytes := decoder_read_segment_borrowed(&decoder) or_return

	// Read sort key bytes if present
	sk_bytes: Maybe([]byte) = nil
	if decoder_has_more(&decoder) {
		sk := decoder_read_segment_borrowed(&decoder) or_return
		sk_bytes = sk
	}

	// Get PK attribute type (S/N/B) from metadata
	pk_name := table_metadata_get_partition_key_name(metadata).? or_return
	pk_type := table_metadata_get_attribute_type(metadata, pk_name).? or_return
	pk_attr := build_attribute_value_with_type(pk_bytes, pk_type)

	// Build SK attribute if present
	sk_attr: Maybe(Attribute_Value) = nil
	if sk, has_sk := sk_bytes.?; has_sk {
		sk_name := table_metadata_get_sort_key_name(metadata).? or_return
		sk_type := table_metadata_get_attribute_type(metadata, sk_name).? or_return
		sk_attr = build_attribute_value_with_type(sk, sk_type)
	}

	return Key{pk = pk_attr, sk = sk_attr}, true
}
2026-02-15 20:57:16 -05:00
// Serialize a binary storage key as a LastEvaluatedKey JSON fragment.
// Returns a string like: {"pk":{"S":"val"},"sk":{"N":"42"}}
// Caller owns the returned string; returns ("", false) when the binary key
// cannot be decoded against the table metadata.
serialize_last_evaluated_key :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (result: string, ok: bool) {
	key, key_ok := build_key_from_binary_with_types(binary_key, metadata)
	if !key_ok {
		return "", false
	}
	defer key_destroy(&key)

	// Key → Item → canonical DynamoDB JSON (sorted keys).
	item := key_to_item(key, metadata.key_schema)
	defer item_destroy(&item)

	return serialize_item(item), true
}
2026-02-17 12:36:38 -05:00
// Fully-decoded binary GSI key. All slices are borrowed views into the source
// key buffer (see decode_gsi_key_full_borrowed) — do not free them, and do not
// let them outlive the buffer.
Decoded_GSI_Key_Full :: struct {
	gsi_pk:  []byte,        // index partition key bytes
	gsi_sk:  Maybe([]byte), // index sort key bytes; nil when the index has no RANGE key
	base_pk: []byte,        // base-table partition key bytes
	base_sk: Maybe([]byte), // base-table sort key bytes; nil when the table has no RANGE key
}
// Decode binary GSI key:
//
//   [gsi][table_name][index_name][gsi_pk][gsi_sk?][base_pk][base_sk?]
//
// Presence of gsi_sk/base_sk depends on whether the index/table has a RANGE key;
// the caller must supply those flags since the layout is not self-describing.
// Returned segments are borrowed from binary_key — no allocation, no ownership.
decode_gsi_key_full_borrowed :: proc(
	binary_key: []byte,
	gsi_has_sort_key: bool,
	table_has_sort_key: bool,
) -> (result: Decoded_GSI_Key_Full, ok: bool) {
	decoder := Key_Decoder{data = binary_key, pos = 0}

	// Reject anything that is not a GSI entry.
	et := decoder_read_entity_type(&decoder) or_return
	if et != .GSI {
		return {}, false
	}

	// Skip table name + index name
	_ = decoder_read_segment_borrowed(&decoder) or_return
	_ = decoder_read_segment_borrowed(&decoder) or_return

	// Read GSI PK
	result.gsi_pk = decoder_read_segment_borrowed(&decoder) or_return

	// Read GSI SK if index has one
	if gsi_has_sort_key {
		sk := decoder_read_segment_borrowed(&decoder) or_return
		result.gsi_sk = sk
	}

	// Read base PK
	result.base_pk = decoder_read_segment_borrowed(&decoder) or_return

	// Read base SK if table has one
	if table_has_sort_key {
		sk := decoder_read_segment_borrowed(&decoder) or_return
		result.base_sk = sk
	}

	return result, true
}
// Serialize a binary *GSI* key into a DynamoDB LastEvaluatedKey JSON object.
// The output must include the *index* key attrs + the *base table* primary key attrs,
// so boto can round-trip ExclusiveStartKey correctly.
// Returns ("", false) when the key cannot be decoded or metadata lacks a
// required attribute name/type (every `.? or_return` below aborts that way).
serialize_last_evaluated_key_gsi :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
	gsi: ^Global_Secondary_Index,
) -> (result: string, ok: bool) {
	// Determine whether index/table have range keys (controls which optional
	// segments decode_gsi_key_full_borrowed expects in the binary layout).
	_, gsi_has_sk := gsi_get_sort_key_name(gsi).?
	_, tbl_has_sk := table_metadata_get_sort_key_name(metadata).?

	decoded, dec_ok := decode_gsi_key_full_borrowed(binary_key, gsi_has_sk, tbl_has_sk)
	if !dec_ok {
		return "", false
	}

	// Resolve key attribute names + types from metadata (S/N/B).
	idx_pk_name := gsi_get_partition_key_name(gsi).? or_return
	idx_pk_type := table_metadata_get_attribute_type(metadata, idx_pk_name).? or_return
	idx_sk_name: Maybe(string) = gsi_get_sort_key_name(gsi)
	idx_sk_type: Maybe(Scalar_Attribute_Type) = nil
	if n, has := idx_sk_name.?; has {
		// Stored as a Maybe; unwrapped (with or_return) only if the decoded
		// key actually carries a GSI sort-key segment.
		idx_sk_type = table_metadata_get_attribute_type(metadata, n)
	}
	base_pk_name := table_metadata_get_partition_key_name(metadata).? or_return
	base_pk_type := table_metadata_get_attribute_type(metadata, base_pk_name).? or_return
	base_sk_name: Maybe(string) = table_metadata_get_sort_key_name(metadata)
	base_sk_type: Maybe(Scalar_Attribute_Type) = nil
	if n, has := base_sk_name.?; has {
		base_sk_type = table_metadata_get_attribute_type(metadata, n)
	}

	// Build LEK item (owned; destroyed after serialization below).
	lek := make(Item)
	defer item_destroy(&lek)

	// Insert helper: skips the attribute when already present (an index key
	// attribute may be the same attribute as a base-table key attribute);
	// clones the name only on actual insert, so no duplicate-key leak.
	add_attr_once :: proc(item: ^Item, name: string, raw: []byte, t: Scalar_Attribute_Type) {
		if _, exists := item^[name]; exists {
			return
		}
		item^[strings.clone(name)] = build_attribute_value_with_type(raw, t)
	}

	// Index keys
	add_attr_once(&lek, idx_pk_name, decoded.gsi_pk, idx_pk_type)
	if sk_raw, has := decoded.gsi_sk.?; has {
		skn := idx_sk_name.? or_return
		skt := idx_sk_type.? or_return
		add_attr_once(&lek, skn, sk_raw, skt)
	}

	// Base table keys
	add_attr_once(&lek, base_pk_name, decoded.base_pk, base_pk_type)
	if sk_raw, has := decoded.base_sk.?; has {
		skn := base_sk_name.? or_return
		skt := base_sk_type.? or_return
		add_attr_once(&lek, skn, sk_raw, skt)
	}

	return serialize_item(lek), true
}