From c4da5ecc14cf3baf49f20569ed963eae64f6e8c8 Mon Sep 17 00:00:00 2001
From: biondizzle
Date: Sun, 15 Feb 2026 12:13:46 -0500
Subject: [PATCH] convert json parser from zig to odin

---
 .gitignore         |   3 +-
 dynamodb/json.odin | 526 +++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 528 insertions(+), 1 deletion(-)
 create mode 100644 dynamodb/json.odin

diff --git a/.gitignore b/.gitignore
index 8642a81..7390937 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 build
 data
-project_context.txt
\ No newline at end of file
+project_context.txt
+jormundb-odin-project_context.txt
\ No newline at end of file

diff --git a/dynamodb/json.odin b/dynamodb/json.odin
new file mode 100644
index 0000000..cc74f35
--- /dev/null
+++ b/dynamodb/json.odin
@@ -0,0 +1,526 @@
// DynamoDB JSON parsing and serialization
// Pure functions for converting between DynamoDB JSON format and internal types
package dynamodb

import "core:encoding/json"
import "core:fmt"
import "core:mem"
import "core:slice"
import "core:strings"

// ============================================================================
// Parsing (JSON → Types)
// ============================================================================

// Parse DynamoDB JSON format into an Item.
// The raw bytes are parsed with context.allocator and freed before returning;
// every string stored in the Item is cloned, so the caller owns the returned
// Item independently of json_bytes.
parse_item :: proc(json_bytes: []byte) -> (Item, bool) {
	data, parse_err := json.parse(json_bytes, allocator = context.allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	return parse_item_from_value(data)
}

// Parse an Item from an already-parsed JSON Value.
// More efficient when you already have a Value (e.g., from request body
// parsing). Keys and attribute strings are cloned, so the returned Item does
// not alias `value` and survives json.destroy_value on it.
parse_item_from_value :: proc(value: json.Value) -> (Item, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return {}, false
	}

	item := make(Item)

	for key, val in obj {
		attr_name := strings.clone(key)

		attr_value, attr_ok := parse_attribute_value(val)
		if !attr_ok {
			// Cleanup on error: free every key/value already inserted,
			// then the map itself and the just-cloned key, so a partial
			// parse leaks nothing.
			for k, v in item {
				delete(k)
				v_copy := v
				attr_value_destroy(&v_copy)
			}
			delete(item)
			delete(attr_name)
			return {}, false
		}

		item[attr_name] = attr_value
	}

	return item, true
}

// Parse a single DynamoDB AttributeValue from JSON.
// Format: {"S": "value"}, {"N": "123"}, {"M": {...}}, etc.
parse_attribute_value :: proc(value: json.Value) -> (Attribute_Value, bool) {
	obj, ok := value.(json.Object)
	if !ok {
		return nil, false
	}

	// DynamoDB attribute must have exactly one key (the type indicator)
	if len(obj) != 1 {
		return nil, false
	}

	// Get the single key-value pair
	for type_name, type_value in obj {
		// String
		if type_name == "S" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return String(strings.clone(string(str))), true
		}

		// Number (stored as string, per the DynamoDB wire format)
		if type_name == "N" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Number(strings.clone(string(str))), true
		}

		// Binary (base64 string; kept as the base64 text, not decoded here)
		if type_name == "B" {
			str, str_ok := type_value.(json.String)
			if !str_ok {
				return nil, false
			}
			return Binary(strings.clone(string(str))), true
		}

		// Boolean
		if type_name == "BOOL" {
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Bool(b), true
		}

		// Null — the wire format is {"NULL": true}
		if type_name == "NULL" {
			b, b_ok := type_value.(json.Boolean)
			if !b_ok {
				return nil, false
			}
			return Null(b), true
		}

		// String Set
		if type_name == "SS" {
			arr, arr_ok := type_value.(json.Array)
			if !arr_ok {
				return nil, false
			}

			strings_arr := make([]string, len(arr))

			for item, i in arr {
				str, str_ok := item.(json.String)
				if !str_ok {
					// Cleanup on error
					for j in 0..
string {
	builder := strings.builder_make()
	defer strings.builder_destroy(&builder)

	serialize_item_to_builder(&builder, item)

	// Builder memory is destroyed on return, so hand back an owned clone.
	return strings.clone(strings.to_string(builder))
}

// Write s to b as a quoted JSON string, escaping the characters that would
// otherwise produce malformed JSON: '"', '\\', and control characters.
// (Previously values were emitted with a bare %s, so a value containing a
// quote corrupted the whole document.)
@(private = "file")
write_json_string :: proc(b: ^strings.Builder, s: string) {
	strings.write_string(b, "\"")
	for i in 0 ..< len(s) {
		c := s[i]
		switch c {
		case '"':
			strings.write_string(b, "\\\"")
		case '\\':
			strings.write_string(b, "\\\\")
		case '\n':
			strings.write_string(b, "\\n")
		case '\r':
			strings.write_string(b, "\\r")
		case '\t':
			strings.write_string(b, "\\t")
		case:
			if c < 0x20 {
				// Remaining control characters must use \u00XX form.
				fmt.sbprintf(b, "\\u%04x", int(c))
			} else {
				strings.write_byte(b, c)
			}
		}
	}
	strings.write_string(b, "\"")
}

// Serialize an Item to a strings.Builder with deterministic ordering.
serialize_item_to_builder :: proc(b: ^strings.Builder, item: Item) {
	// Map iteration order is randomized, so collect and sort the keys to
	// make the output byte-stable across runs.
	keys := make([dynamic]string, context.temp_allocator)
	defer delete(keys)

	for key in item {
		append(&keys, key)
	}

	slice.sort_by(keys[:], proc(a, b: string) -> bool {
		return a < b
	})

	strings.write_string(b, "{")
	for key, i in keys {
		if i > 0 {
			strings.write_string(b, ",")
		}
		write_json_string(b, key)
		strings.write_string(b, ":")
		serialize_attribute_value(b, item[key])
	}
	strings.write_string(b, "}")
}

// Serialize an AttributeValue to DynamoDB JSON format, e.g. {"S":"value"}.
serialize_attribute_value :: proc(b: ^strings.Builder, attr: Attribute_Value) {
	switch v in attr {
	case String:
		strings.write_string(b, `{"S":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Number:
		// Numbers travel as strings on the wire; escape defensively anyway.
		strings.write_string(b, `{"N":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Binary:
		strings.write_string(b, `{"B":`)
		write_json_string(b, string(v))
		strings.write_string(b, "}")

	case Bool:
		fmt.sbprintf(b, `{"BOOL":%v}`, bool(v))

	case Null:
		strings.write_string(b, `{"NULL":true}`)

	case String_Set:
		strings.write_string(b, `{"SS":[`)
		for s, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, string(s))
		}
		strings.write_string(b, "]}")

	case Number_Set:
		strings.write_string(b, `{"NS":[`)
		for n, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, string(n))
		}
		strings.write_string(b, "]}")

	case Binary_Set:
		strings.write_string(b, `{"BS":[`)
		for bin, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, string(bin))
		}
		strings.write_string(b, "]}")

	case List:
		strings.write_string(b, `{"L":[`)
		for item, i in v {
			if i > 0 {
				strings.write_string(b, ",")
			}
			// Recurse: list elements are themselves AttributeValues.
			serialize_attribute_value(b, item)
		}
		strings.write_string(b, "]}")

	case Map:
		strings.write_string(b, `{"M":{`)

		// Sort keys for deterministic output, same as serialize_item_to_builder.
		keys := make([dynamic]string, context.temp_allocator)
		for key in v {
			append(&keys, key)
		}

		slice.sort_by(keys[:], proc(a, b: string) -> bool {
			return a < b
		})

		for key, i in keys {
			if i > 0 {
				strings.write_string(b, ",")
			}
			write_json_string(b, key)
			strings.write_string(b, ":")
			serialize_attribute_value(b, v[key])
		}

		strings.write_string(b, "}}")
	}
}

// ============================================================================
// Request Parsing Helpers
// ============================================================================

// Extract table name from request body.
parse_table_name :: proc(request_body: []byte) -> (string, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "", false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return "", false
	}

	table_name_val, found := root["TableName"]
	if !found {
		return "", false
	}

	table_name_str, str_ok := table_name_val.(json.String)
	if !str_ok {
		return "", false
	}

	// NOTE(review): the returned string aliases memory owned by the parsed
	// JSON value, which is allocated from the temp allocator and released by
	// the deferred destroy_value above. It appears to rely on the temp arena
	// ignoring individual frees, so it is only valid until the temp allocator
	// is reset — callers that retain it must clone. TODO confirm against callers.
	return string(table_name_str), true
}

// Parse the "Item" field from a request body.
// Returns an owned Item (strings cloned; independent of request_body).
parse_item_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	item_val, found := root["Item"]
	if !found {
		return {}, false
	}

	return parse_item_from_value(item_val)
}

// Parse the "Key" field from a request body.
// Returns an owned Item representing the key.
parse_key_from_request :: proc(request_body: []byte) -> (Item, bool) {
	data, parse_err :=
json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return {}, false
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return {}, false
	}

	key_val, found := root["Key"]
	if !found {
		return {}, false
	}

	return parse_item_from_value(key_val)
}

// ============================================================================
// Pagination Helpers
// ============================================================================

// Parse the "Limit" field from a request body.
// Returns 0 when the field is absent, the body is malformed, or the value is
// not numeric. A non-integral JSON number is truncated toward zero.
parse_limit :: proc(request_body: []byte) -> int {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return 0
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return 0
	}

	limit_val, found := root["Limit"]
	if !found {
		return 0
	}

	// JSON numbers can arrive as either Integer or Float depending on how
	// the value was written, so accept both.
	#partial switch v in limit_val {
	case json.Integer:
		return int(v)
	case json.Float:
		return int(v)
	}

	return 0
}

// Parse the "ExclusiveStartKey" field from a request body as binary key bytes.
// Returns nil when the field is absent or malformed.
//
// NOTE(review): this is currently a stub — it validates that the field parses
// as an Item but ALWAYS returns nil, because the Item -> binary-key conversion
// is deferred to the storage layer. Callers must not treat nil as "no start
// key was supplied".
parse_exclusive_start_key :: proc(request_body: []byte) -> Maybe([]byte) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil
	}
	defer json.destroy_value(data)

	root, ok := data.(json.Object)
	if !ok {
		return nil
	}

	key_val, found := root["ExclusiveStartKey"]
	if !found {
		return nil
	}

	// Parse as an Item first, purely as validation of the field's shape.
	key_item, item_ok := parse_item_from_value(key_val)
	if !item_ok {
		return nil
	}
	defer item_destroy(&key_item)

	// Conversion to binary key bytes will be done by the storage layer;
	// until then this intentionally returns nil even on success.
	return nil
}

// Serialize a Key as the LastEvaluatedKey field of a response.
// (ExclusiveStartKey is the request-side counterpart; responses carry
// LastEvaluatedKey.) Caller owns the returned string.
serialize_last_evaluated_key :: proc(key: Key) -> string {
	item := key_to_item(key, {}) // Empty key_schema since we don't need validation here
	defer item_destroy(&item)

	return serialize_item(item)
}