// DynamoDB JSON parsing and serialization
// Pure functions for converting between DynamoDB JSON format and internal types
package dynamodb

import "core:encoding/json"
import "core:fmt"
import "core:slice"
import "core:strings"

// ============================================================================
// Parsing (JSON → Types)
// ============================================================================

// Parse DynamoDB JSON format into an Item.
// Caller owns the returned Item.
parse_item :: proc(json_bytes: []byte) -> (Item, bool) {
	parsed, parse_err := json.parse(json_bytes, allocator = context.allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(parsed)
	return parse_item_from_value(parsed)
}

// Parse an Item from an already-parsed JSON Value.
// More efficient when a Value is already in hand (e.g., from request body parsing).
// All keys and values are deep-cloned, so the result outlives the input Value.
parse_item_from_value :: proc(value: json.Value) -> (Item, bool) {
	obj, is_obj := value.(json.Object)
	if !is_obj {
		return {}, false
	}

	result := make(Item)
	for key, val in obj {
		owned_key := strings.clone(key)
		parsed_attr, attr_ok := parse_attribute_value(val)
		if !attr_ok {
			// Cleanup on error: free every entry inserted so far,
			// the map itself, and the key cloned for this iteration.
			for k, v in result {
				delete(k)
				v_copy := v
				attr_value_destroy(&v_copy)
			}
			delete(result)
			delete(owned_key)
			return {}, false
		}
		result[owned_key] = parsed_attr
	}
	return result, true
}

// Parse a single DynamoDB AttributeValue from JSON
// Format: {"S": "value"}, {"N": "123"}, {"M": {...}}, etc.
parse_attribute_value :: proc(value: json.Value) -> (Attribute_Value, bool) { obj, ok := value.(json.Object) if !ok { return nil, false } // DynamoDB attribute must have exactly one key (the type indicator) if len(obj) != 1 { return nil, false } // Get the single key-value pair for type_name, type_value in obj { // String if type_name == "S" { str, str_ok := type_value.(json.String) if !str_ok { return nil, false } return String(strings.clone(string(str))), true } // Number (stored as string) if type_name == "N" { str, str_ok := type_value.(json.String) if !str_ok { return nil, false } // Parse into DDB_Number ddb_num, num_ok := parse_ddb_number(string(str)) if !num_ok { return nil, false } // Clone the string fields since they're slices of the input owned_num := clone_ddb_number(ddb_num) return owned_num, true } // Binary (base64 string) if type_name == "B" { str, str_ok := type_value.(json.String) if !str_ok { return nil, false } return Binary(strings.clone(string(str))), true } // Boolean if type_name == "BOOL" { b, b_ok := type_value.(json.Boolean) if !b_ok { return nil, false } return Bool(b), true } // Null if type_name == "NULL" { b, b_ok := type_value.(json.Boolean) if !b_ok { return nil, false } return Null(b), true } // String Set if type_name == "SS" { arr, arr_ok := type_value.(json.Array) if !arr_ok { return nil, false } strings_arr := make([]string, len(arr)) for item, i in arr { str, str_ok := item.(json.String) if !str_ok { // Cleanup on error for j in 0.. 
string {
	builder := strings.builder_make()
	defer strings.builder_destroy(&builder)
	serialize_item_to_builder(&builder, item)
	return strings.clone(strings.to_string(builder))
}

// Write `s` into the builder as a JSON string body (without the surrounding
// quotes), escaping the characters RFC 8259 requires: double quote,
// backslash, and control characters below 0x20. Multi-byte UTF-8 sequences
// pass through unchanged. Without this, values or attribute names containing
// `"` or `\` would produce invalid JSON output.
write_json_escaped_string :: proc(b: ^strings.Builder, s: string) {
	for i in 0..<len(s) {
		c := s[i]
		switch c {
		case '"':
			strings.write_string(b, `\"`)
		case '\\':
			strings.write_string(b, `\\`)
		case '\n':
			strings.write_string(b, `\n`)
		case '\r':
			strings.write_string(b, `\r`)
		case '\t':
			strings.write_string(b, `\t`)
		case:
			if c < 0x20 {
				// Remaining control characters use the \u00XX form
				fmt.sbprintf(b, "\\u%04x", c)
			} else {
				strings.write_byte(b, c)
			}
		}
	}
}

// Serialize an Item to a strings.Builder with deterministic ordering.
serialize_item_to_builder :: proc(b: ^strings.Builder, item: Item) {
	// Collect and sort keys so output is deterministic regardless of map iteration order
	keys := make([dynamic]string, context.temp_allocator)
	defer delete(keys)
	for key in item {
		append(&keys, key)
	}
	slice.sort_by(keys[:], proc(a, b: string) -> bool { return a < b })

	strings.write_string(b, "{")
	for key, i in keys {
		if i > 0 {
			strings.write_string(b, ",")
		}
		strings.write_string(b, `"`)
		write_json_escaped_string(b, key)
		strings.write_string(b, `":`)
		serialize_attribute_value(b, item[key])
	}
	strings.write_string(b, "}")
}

// Serialize a single AttributeValue to DynamoDB JSON format,
// e.g. {"S":"value"}, {"N":"123"}, {"M":{...}}.
serialize_attribute_value :: proc(b: ^strings.Builder, attr: Attribute_Value) {
	switch v in attr {
	case String:
		strings.write_string(b, `{"S":"`)
		write_json_escaped_string(b, string(v))
		strings.write_string(b, `"}`)
	case DDB_Number:
		// Number text is digits/sign/exponent only — no escaping needed
		strings.write_string(b, `{"N":"`)
		strings.write_string(b, format_ddb_number(v))
		strings.write_string(b, `"}`)
	case Binary:
		// Stored as base64 text; escape defensively in case non-base64
		// bytes were stored here (no-op for a valid base64 alphabet).
		strings.write_string(b, `{"B":"`)
		write_json_escaped_string(b, string(v))
		strings.write_string(b, `"}`)
	case Bool:
		strings.write_string(b, `{"BOOL":`)
		if bool(v) {
			strings.write_string(b, "true")
		} else {
			strings.write_string(b, "false")
		}
		strings.write_string(b, "}")
	case Null:
		// DynamoDB always emits {"NULL":true} regardless of the stored flag
		strings.write_string(b, `{"NULL":true}`)
	case String_Set:
		strings.write_string(b, `{"SS":[`)
		for s, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			strings.write_string(b, `"`)
			write_json_escaped_string(b, s)
			strings.write_string(b, `"`)
		}
		strings.write_string(b, "]}")
	case DDB_Number_Set:
		strings.write_string(b, `{"NS":[`)
		for num, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			strings.write_string(b, `"`)
			strings.write_string(b, format_ddb_number(num))
			strings.write_string(b, `"`)
		}
		strings.write_string(b, "]}")
	case Binary_Set:
		strings.write_string(b, `{"BS":[`)
		for bin, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			strings.write_string(b, `"`)
			write_json_escaped_string(b, string(bin))
			strings.write_string(b, `"`)
		}
		strings.write_string(b, "]}")
	case List:
		strings.write_string(b, `{"L":[`)
		for item, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			serialize_attribute_value(b, item)
		}
		strings.write_string(b, "]}")
	case Map:
		strings.write_string(b, `{"M":{`)
		// Sort nested keys too, so nested maps are also deterministic
		keys := make([dynamic]string, context.temp_allocator)
		defer delete(keys) // consistency with serialize_item_to_builder
		for key in v {
			append(&keys, key)
		}
		slice.sort_by(keys[:], proc(a, b: string) -> bool { return a < b })
		for key, i in keys {
			if i > 0 {
				strings.write_string(b, ",")
			}
			strings.write_string(b, `"`)
			write_json_escaped_string(b, key)
			strings.write_string(b, `":`)
			serialize_attribute_value(b, v[key])
		}
		strings.write_string(b, "}}")
	}
}

// ============================================================================
// Request Parsing Helpers
// ============================================================================

// Extract the "TableName" field from a request body.
// Returns a cloned (caller-owned) string.
parse_table_name :: proc(request_body: []byte) -> (string, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "", false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return "", false
	}
	table_name_val, found := root["TableName"]
	if !found {
		return "", false
	}
	table_name_str, str_ok := table_name_val.(json.String)
	if !str_ok {
		return "", false
	}
	return strings.clone(string(table_name_str)), true
}

// Parse the "Item" field from a request body.
// Returns an owned Item (deep-cloned before the temp JSON tree is destroyed).
parse_item_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}
	item_val, found := root["Item"]
	if !found {
		return {}, false
	}
	// parse_item_from_value clones everything, so the deferred destroy is safe
	return parse_item_from_value(item_val)
}

// Parse the "Key" field from a request body.
// Returns an owned Item representing the key.
parse_key_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}
	key_val, found := root["Key"]
	if !found {
		return {}, false
	}
	return parse_item_from_value(key_val)
}

// ============================================================================
// Pagination Helpers
// ============================================================================

// Parse "Limit" from the request body.
// Returns 0 when the field is absent, the body is malformed, or the value
// is not numeric.
parse_limit :: proc(request_body: []byte) -> int {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return 0
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return 0
	}
	limit_val, found := root["Limit"]
	if !found {
		return 0
	}
	// JSON numbers can decode as either Integer or Float depending on the literal
	#partial switch v in limit_val {
	case json.Integer:
		return int(v)
	case json.Float:
		return int(v)
	}
	return 0
}

// ============================================================================
// ExclusiveStartKey Parsing (Pagination Input)
//
// Parse ExclusiveStartKey from request body. Requires key_schema so we can
// validate and extract the key, then convert it to a binary storage key.
// Returns the binary key bytes that can be passed straight to scan/query.
// Returns nil (not an error) when the field is absent.
// ============================================================================
parse_exclusive_start_key :: proc(
	request_body: []byte,
	table_name: string,
	key_schema: []Key_Schema_Element,
) -> (result: Maybe([]byte), ok: bool) {
	parsed, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil, true // unparseable body → treated as "no ESK", not an error
	}
	defer json.destroy_value(parsed)

	root, is_obj := parsed.(json.Object)
	if !is_obj {
		return nil, true
	}

	esk_val, present := root["ExclusiveStartKey"]
	if !present {
		return nil, true // absent → no pagination, that's ok
	}

	// Parse the ExclusiveStartKey field as a regular DynamoDB Item
	key_item, item_ok := parse_item_from_value(esk_val)
	if !item_ok {
		return nil, false // present but malformed → real error
	}
	defer item_destroy(&key_item)

	// Validate against the schema and extract the typed key struct
	key_struct, key_ok := key_from_item(key_item, key_schema)
	if !key_ok {
		return nil, false // missing required key attributes
	}
	defer key_destroy(&key_struct)

	// Pull out the raw byte values for pk/sk
	key_values, kv_ok := key_get_values(&key_struct)
	if !kv_ok {
		return nil, false
	}

	// Encode as the binary storage key used by scan/query
	binary_key := build_data_key(table_name, key_values.pk, key_values.sk)
	result = binary_key
	ok = true
	return
}

// ============================================================================
// LastEvaluatedKey Generation (Pagination Output)
//
// Decode a binary storage key back into a DynamoDB JSON fragment suitable
// for the "LastEvaluatedKey" field in scan/query responses.
//
// Steps:
//   1. Decode the binary key → table_name, pk_bytes, sk_bytes
//   2. Look up attribute types from metadata (S/N/B)
//   3. Build a Key struct with correctly-typed AttributeValues
//   4. Convert Key → Item → DynamoDB JSON string
// ============================================================================

// Build a Key struct from a binary storage key using metadata for type info.
// This mirrors the Zig buildKeyFromBinaryWithTypes helper.
build_key_from_binary_with_types :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (key: Key, ok: bool) {
	decoder := Key_Decoder{data = binary_key, pos = 0}

	// The entity-type byte and the table-name segment are consumed by the
	// decoder but not needed here, so both results are discarded.
	_ = decoder_read_entity_type(&decoder) or_return
	_ = decoder_read_segment_borrowed(&decoder) or_return

	// Partition key bytes (borrowed views into binary_key)
	pk_bytes := decoder_read_segment_borrowed(&decoder) or_return

	// Sort key bytes, only if a further segment is present
	sk_bytes: Maybe([]byte) = nil
	if decoder_has_more(&decoder) {
		sk_segment := decoder_read_segment_borrowed(&decoder) or_return
		sk_bytes = sk_segment
	}

	// Resolve the PK attribute type (S/N/B) from table metadata
	pk_name := table_metadata_get_partition_key_name(metadata).? or_return
	pk_type := table_metadata_get_attribute_type(metadata, pk_name).? or_return
	pk_attr := build_attribute_value_with_type(pk_bytes, pk_type)

	// Resolve the SK attribute, if a sort key segment was decoded
	sk_attr: Maybe(Attribute_Value) = nil
	if raw_sk, has_sk := sk_bytes.?; has_sk {
		sk_name := table_metadata_get_sort_key_name(metadata).? or_return
		sk_type := table_metadata_get_attribute_type(metadata, sk_name).? or_return
		sk_attr = build_attribute_value_with_type(raw_sk, sk_type)
	}

	return Key{pk = pk_attr, sk = sk_attr}, true
}

// Serialize a binary storage key as a LastEvaluatedKey JSON fragment.
// Returns a string like: {"pk":{"S":"val"},"sk":{"N":"42"}}
serialize_last_evaluated_key :: proc(
	binary_key: []byte,
	metadata: ^Table_Metadata,
) -> (result: string, ok: bool) {
	decoded_key, built := build_key_from_binary_with_types(binary_key, metadata)
	if !built {
		return "", false
	}
	defer key_destroy(&decoded_key)

	// Key → Item → deterministic DynamoDB JSON string
	item := key_to_item(decoded_key, metadata.key_schema)
	defer item_destroy(&item)

	return serialize_item(item), true
}