actually make filters work right

This commit is contained in:
2026-02-15 23:38:48 -05:00
parent b510c000ec
commit c6a78ca054
7 changed files with 2193 additions and 26 deletions

10
TODO.md
View File

@@ -67,14 +67,14 @@ Goal: "aws cli works reliably for CreateTable/ListTables/PutItem/GetItem/DeleteI
## Next (feature parity with Zig + API completeness)
### 5) UpdateItem / conditional logic groundwork
- [x] `UpdateItem` handler registered in router (currently returns clear "not yet supported" error)
- [ ] Implement `UpdateItem` (initially minimal: SET for scalar attrs)
- [x] Implement `UpdateItem` (initially minimal: SET for scalar attrs)
- [ ] Add `ConditionExpression` support for Put/Delete/Update (start with simple comparisons)
- [ ] Define internal "update plan" representation (parsed ops → applied mutations)
- [x] Define internal "update plan" representation (parsed ops → applied mutations)
### 6) Response completeness / options
- [ ] `ReturnValues` handling where relevant (NONE/ALL_OLD/UPDATED_NEW etc. — even partial support is useful)
- [ ] `ProjectionExpression` (return subset of attributes)
- [ ] `FilterExpression` (post-query filter for Scan/Query)
- [x] `ReturnValues` handling where relevant (NONE/ALL_OLD/UPDATED_NEW etc. — even partial support is useful)
- [x] `ProjectionExpression` (return subset of attributes)
- [x] `FilterExpression` (post-query filter for Scan/Query)
### 7) Test coverage / tooling
- [ ] Add integration tests mirroring AWS CLI script flows:

View File

@@ -144,7 +144,7 @@ is_ident_char :: proc(c: byte) -> bool {
// Helper: convert Maybe(string) tokens into (string, bool) so or_return works.
// ---------------------------------------------------------------------------
@(private = "file")
// Package-visible: used by update.odin and filter.odin
next_token :: proc(t: ^Tokenizer) -> (tok: string, ok: bool) {
if v, has := tokenizer_next(t).?; has {
tok = v
@@ -319,7 +319,7 @@ parse_operator :: proc(token: string) -> (Sort_Key_Operator, bool) {
return .EQ, false
}
@(private = "file")
// Package-visible: used by update.odin and filter.odin
resolve_attribute_name :: proc(token: string, names: Maybe(map[string]string)) -> (string, bool) {
if len(token) > 0 && token[0] == '#' {
if n, has_names := names.?; has_names {
@@ -332,7 +332,7 @@ resolve_attribute_name :: proc(token: string, names: Maybe(map[string]string)) -
return token, true
}
@(private = "file")
// Package-visible: used by update.odin and filter.odin
resolve_attribute_value :: proc(
token: string,
values: map[string]Attribute_Value,

813
dynamodb/filter.odin Normal file
View File

@@ -0,0 +1,813 @@
// FilterExpression and ProjectionExpression support
// FilterExpression: post-retrieval filter applied to Scan/Query results
// ProjectionExpression: return only specified attributes from items
package dynamodb
import "core:encoding/json"
import "core:strconv"
import "core:strings"
// ============================================================================
// ProjectionExpression
//
// A comma-separated list of attribute names (with optional #name substitution)
// that specifies which attributes to return.
// ============================================================================
// Parse the "ProjectionExpression" field of a request body into a list of
// resolved attribute paths. The expression is a comma-separated list of
// attribute names, each optionally a #placeholder resolved through
// `attribute_names`. Returns (nil, false) when the field is absent or
// malformed — absence is not an error, the caller should check separately.
// NOTE(review): the returned strings are views into the temp-allocated JSON
// document; confirm callers consume them within the same temp-allocator scope.
parse_projection_expression :: proc(
	request_body: []byte,
	attribute_names: Maybe(map[string]string),
) -> (paths: []string, ok: bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return nil, false
	}
	defer json.destroy_value(data)
	root, root_ok := data.(json.Object)
	if !root_ok {
		return nil, false
	}
	pe_val, found := root["ProjectionExpression"]
	if !found {
		return nil, false // absent is not an error, caller should check
	}
	pe_str, str_ok := pe_val.(json.String)
	if !str_ok {
		return nil, false
	}
	// Split by comma and resolve #name placeholders.
	parts := strings.split(string(pe_str), ",")
	// Leak fix: strings.split allocates the slice header and it was never
	// freed. The elements are views into pe_str, so freeing the slice does
	// not invalidate the resolved paths appended below.
	defer delete(parts)
	result := make([dynamic]string)
	for part in parts {
		trimmed := strings.trim_space(part)
		if len(trimmed) == 0 {
			continue
		}
		resolved, res_ok := resolve_attribute_name(trimmed, attribute_names)
		if !res_ok {
			delete(result)
			return nil, false
		}
		append(&result, resolved)
	}
	return result[:], true
}
// Build a copy of `item` restricted to the attributes named in `projection`.
// An empty projection means "return everything": the whole item is deep-copied.
// Projection paths not present on the item are silently skipped.
apply_projection :: proc(item: Item, projection: []string) -> Item {
	if len(projection) == 0 {
		return item_deep_copy(item)
	}
	out := make(Item)
	for attr_path in projection {
		val, present := item[attr_path]
		if !present {
			continue
		}
		out[strings.clone(attr_path)] = attr_value_deep_copy(val)
	}
	return out
}
// Deep copy an entire item: every key is cloned and every value is
// recursively copied, so the result shares no memory with the input.
item_deep_copy :: proc(item: Item) -> Item {
	copied := make(Item)
	for k, v in item {
		copied[strings.clone(k)] = attr_value_deep_copy(v)
	}
	return copied
}
// ============================================================================
// FilterExpression
//
// A condition expression applied post-retrieval. Supports:
// - Comparisons: path = :val, path <> :val, path < :val, etc.
// - BETWEEN: path BETWEEN :lo AND :hi
// - IN: path IN (:v1, :v2, :v3)
// - begins_with: begins_with(path, :prefix)
// - contains: contains(path, :substr)
// - attribute_exists(path)
// - attribute_not_exists(path)
// - AND / OR / NOT combinators
//
// This is a recursive-descent parser for condition expressions.
// ============================================================================
// Discriminator for Filter_Node. The leaf kinds (Comparison through
// Attribute_Not_Exists) test a single attribute path; And/Or/Not combine
// child nodes into a tree.
Filter_Node_Type :: enum {
	Comparison, // path op value
	Between, // path BETWEEN lo AND hi
	In, // path IN (v1, v2, ...)
	Begins_With, // begins_with(path, value)
	Contains, // contains(path, value)
	Attribute_Exists, // attribute_exists(path)
	Attribute_Not_Exists, // attribute_not_exists(path)
	And, // left AND right
	Or, // left OR right
	Not, // NOT child
}
// Comparison operators accepted between a path and a value placeholder.
Comparison_Op :: enum {
	EQ, // =
	NE, // <>
	LT, // <
	LE, // <=
	GT, // >
	GE, // >=
}
// One node of a parsed FilterExpression tree. Only the fields relevant to
// `type` are populated; the rest stay zero-valued. Owned values and child
// nodes are released by filter_node_destroy (which does NOT free the node
// pointer itself).
Filter_Node :: struct {
	type: Filter_Node_Type,
	// For Comparison (path and value are reused by Between/In/Begins_With/Contains)
	path: string,
	comp_op: Comparison_Op,
	value: Attribute_Value,
	// For Between: upper bound (the lower bound lives in `value`)
	value2: Maybe(Attribute_Value),
	// For In
	in_values: []Attribute_Value,
	// For And/Or
	left: ^Filter_Node,
	right: ^Filter_Node,
	// For Not
	child: ^Filter_Node,
}
// Release everything a Filter_Node owns: its attribute values, its IN list,
// and (recursively) its child nodes including the child allocations
// themselves. Deliberately does NOT free `node` — callers that heap-allocated
// the node follow this call with free(node), matching the usage in the
// parsers above.
filter_node_destroy :: proc(node: ^Filter_Node) {
	if node == nil {
		return
	}
	attr_value_destroy(&node.value)
	if v2, ok := node.value2.?; ok {
		// Unwrapping the Maybe yields a copy; destroy through a local so
		// its address can be taken.
		v2_copy := v2
		attr_value_destroy(&v2_copy)
	}
	for &iv in node.in_values {
		attr_value_destroy(&iv)
	}
	if node.in_values != nil {
		delete(node.in_values)
	}
	if node.left != nil {
		filter_node_destroy(node.left)
		free(node.left)
	}
	if node.right != nil {
		filter_node_destroy(node.right)
		free(node.right)
	}
	if node.child != nil {
		filter_node_destroy(node.child)
		free(node.child)
	}
}
// ============================================================================
// Filter Expression Parser
// ============================================================================
// Entry point: parse a complete FilterExpression into an expression tree.
// Returns (nil, false) on any syntax error. On success the caller owns the
// returned node (release with filter_node_destroy followed by free).
parse_filter_expression :: proc(
	expression: string,
	attribute_names: Maybe(map[string]string),
	attribute_values: map[string]Attribute_Value,
) -> (node: ^Filter_Node, ok: bool) {
	tokenizer := tokenizer_init(expression)
	return parse_or_expr(&tokenizer, attribute_names, attribute_values)
}
// parse_or_expr — lowest-precedence level: and_expr { OR and_expr }.
// Left-associative: each OR wraps the accumulated tree as the new left
// child. A lookahead token that is not OR is pushed back by restoring t.pos.
// On failure of a right operand the partial tree is destroyed.
parse_or_expr :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	left, left_ok := parse_and_expr(t, names, values)
	if !left_ok {
		return nil, false
	}
	for {
		saved_pos := t.pos // remember position so the lookahead can be undone
		tok_maybe := tokenizer_next(t)
		tok, has := tok_maybe.?
		if !has {
			break
		}
		if strings.equal_fold(tok, "OR") {
			right, right_ok := parse_and_expr(t, names, values)
			if !right_ok {
				filter_node_destroy(left)
				free(left)
				return nil, false
			}
			parent := new(Filter_Node)
			parent.type = .Or
			parent.left = left
			parent.right = right
			left = parent
		} else {
			t.pos = saved_pos
			break
		}
	}
	return left, true
}
// parse_and_expr — middle precedence: not_expr { AND not_expr }.
// Same left-associative accumulation and lookahead-rewind strategy as
// parse_or_expr; AND binds tighter than OR because this level is parsed
// below it.
parse_and_expr :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	left, left_ok := parse_not_expr(t, names, values)
	if !left_ok {
		return nil, false
	}
	for {
		saved_pos := t.pos // remember position so the lookahead can be undone
		tok_maybe := tokenizer_next(t)
		tok, has := tok_maybe.?
		if !has {
			break
		}
		if strings.equal_fold(tok, "AND") {
			right, right_ok := parse_not_expr(t, names, values)
			if !right_ok {
				filter_node_destroy(left)
				free(left)
				return nil, false
			}
			parent := new(Filter_Node)
			parent.type = .And
			parent.left = left
			parent.right = right
			left = parent
		} else {
			t.pos = saved_pos
			break
		}
	}
	return left, true
}
// parse_not_expr — handles an optional leading NOT (binds tighter than AND).
// Fix: the child is parsed by recursing into parse_not_expr rather than
// going straight to parse_primary_expr, so stacked negations such as
// "NOT NOT a = :v" parse correctly. Previously the second NOT reached
// parse_primary_expr and failed. A non-NOT first token is pushed back and
// falls through to parse_primary_expr exactly as before.
parse_not_expr :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	saved_pos := t.pos
	tok_maybe := tokenizer_next(t)
	tok, has := tok_maybe.?
	if !has {
		return nil, false
	}
	if strings.equal_fold(tok, "NOT") {
		// Recurse so chained NOTs are accepted; single-NOT inputs behave
		// identically because the recursion falls through to primary.
		child, child_ok := parse_not_expr(t, names, values)
		if !child_ok {
			return nil, false
		}
		node := new(Filter_Node)
		node.type = .Not
		node.child = child
		return node, true
	}
	t.pos = saved_pos
	return parse_primary_expr(t, names, values)
}
// parse_primary_expr — highest-precedence unit of the grammar:
//   "(" or_expr ")"
// | begins_with/contains/attribute_exists/attribute_not_exists "(" ... ")"
// | path BETWEEN :lo AND :hi
// | path IN ( :v1 , :v2 , ... )
// | path <comparison-op> :value
// Function keywords are matched case-insensitively; the helpers take over
// with the keyword already consumed, parsing from "(" onward.
parse_primary_expr :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	first_tok, first_ok := next_token(t)
	if !first_ok {
		return nil, false
	}
	// Parenthesized expression
	if first_tok == "(" {
		inner, inner_ok := parse_or_expr(t, names, values)
		if !inner_ok {
			return nil, false
		}
		rparen, rp_ok := next_token(t)
		if !rp_ok || rparen != ")" {
			filter_node_destroy(inner)
			free(inner)
			return nil, false
		}
		return inner, true
	}
	// Function-style: begins_with, contains, attribute_exists, attribute_not_exists
	if strings.equal_fold(first_tok, "begins_with") {
		return parse_filter_begins_with(t, names, values)
	}
	if strings.equal_fold(first_tok, "contains") {
		return parse_filter_contains(t, names, values)
	}
	if strings.equal_fold(first_tok, "attribute_exists") {
		return parse_filter_attr_exists(t, names, true)
	}
	if strings.equal_fold(first_tok, "attribute_not_exists") {
		return parse_filter_attr_exists(t, names, false)
	}
	// Otherwise the first token must be an attribute path (or #name
	// placeholder): Comparison, BETWEEN, or IN.
	path, path_ok := resolve_attribute_name(first_tok, names)
	if !path_ok {
		return nil, false
	}
	op_tok, op_ok := next_token(t)
	if !op_ok {
		return nil, false
	}
	// BETWEEN
	if strings.equal_fold(op_tok, "BETWEEN") {
		return parse_filter_between(t, path, names, values)
	}
	// IN
	if strings.equal_fold(op_tok, "IN") {
		return parse_filter_in(t, path, names, values)
	}
	// Comparison operators
	comp_op: Comparison_Op
	if op_tok == "=" {
		comp_op = .EQ
	} else if op_tok == "<>" {
		comp_op = .NE
	} else if op_tok == "<" {
		comp_op = .LT
	} else if op_tok == "<=" {
		comp_op = .LE
	} else if op_tok == ">" {
		comp_op = .GT
	} else if op_tok == ">=" {
		comp_op = .GE
	} else {
		return nil, false // unrecognized operator token
	}
	val_tok, vt_ok := next_token(t)
	if !vt_ok {
		return nil, false
	}
	val, val_ok := resolve_attribute_value(val_tok, values)
	if !val_ok {
		return nil, false
	}
	node := new(Filter_Node)
	node.type = .Comparison
	node.path = path
	node.comp_op = comp_op
	node.value = val
	return node, true
}
// begins_with(path, :prefix) — called with the "begins_with" keyword already
// consumed. Consumes "(", path, ",", value, ")" and builds a Begins_With node.
parse_filter_begins_with :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	if tok, ok := next_token(t); !ok || tok != "(" {
		return nil, false
	}
	raw_path, have_path := next_token(t)
	if !have_path {
		return nil, false
	}
	attr_path, name_ok := resolve_attribute_name(raw_path, names)
	if !name_ok {
		return nil, false
	}
	if tok, ok := next_token(t); !ok || tok != "," {
		return nil, false
	}
	raw_val, have_val := next_token(t)
	if !have_val {
		return nil, false
	}
	prefix, val_resolved := resolve_attribute_value(raw_val, values)
	if !val_resolved {
		return nil, false
	}
	if tok, ok := next_token(t); !ok || tok != ")" {
		attr_value_destroy(&prefix) // don't leak the resolved value
		return nil, false
	}
	node := new(Filter_Node)
	node.type = .Begins_With
	node.path = attr_path
	node.value = prefix
	return node, true
}
// contains(path, :operand) — called with the "contains" keyword already
// consumed. Consumes "(", path, ",", value, ")" and builds a Contains node.
parse_filter_contains :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	if tok, ok := next_token(t); !ok || tok != "(" {
		return nil, false
	}
	raw_path, have_path := next_token(t)
	if !have_path {
		return nil, false
	}
	attr_path, name_ok := resolve_attribute_name(raw_path, names)
	if !name_ok {
		return nil, false
	}
	if tok, ok := next_token(t); !ok || tok != "," {
		return nil, false
	}
	raw_val, have_val := next_token(t)
	if !have_val {
		return nil, false
	}
	operand, val_resolved := resolve_attribute_value(raw_val, values)
	if !val_resolved {
		return nil, false
	}
	if tok, ok := next_token(t); !ok || tok != ")" {
		attr_value_destroy(&operand) // don't leak the resolved value
		return nil, false
	}
	node := new(Filter_Node)
	node.type = .Contains
	node.path = attr_path
	node.value = operand
	return node, true
}
// attribute_exists(path) / attribute_not_exists(path) — called with the
// keyword already consumed; `exists` selects which node type is built.
parse_filter_attr_exists :: proc(
	t: ^Tokenizer,
	names: Maybe(map[string]string),
	exists: bool,
) -> (^Filter_Node, bool) {
	if tok, ok := next_token(t); !ok || tok != "(" {
		return nil, false
	}
	raw_path, have_path := next_token(t)
	if !have_path {
		return nil, false
	}
	attr_path, name_ok := resolve_attribute_name(raw_path, names)
	if !name_ok {
		return nil, false
	}
	if tok, ok := next_token(t); !ok || tok != ")" {
		return nil, false
	}
	node := new(Filter_Node)
	node.type = .Attribute_Exists if exists else .Attribute_Not_Exists
	node.path = attr_path
	return node, true
}
// path BETWEEN :lo AND :hi — called with "BETWEEN" already consumed and the
// (already resolved) path passed in. The lower bound is stored in the node's
// `value`, the upper bound in `value2`. Every failure path destroys the
// values resolved so far so nothing leaks.
parse_filter_between :: proc(
	t: ^Tokenizer,
	path: string,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	lo_tok, lo_ok := next_token(t)
	if !lo_ok {
		return nil, false
	}
	lo_val, lo_val_ok := resolve_attribute_value(lo_tok, values)
	if !lo_val_ok {
		return nil, false
	}
	and_tok, and_ok := next_token(t)
	if !and_ok || !strings.equal_fold(and_tok, "AND") {
		attr_value_destroy(&lo_val)
		return nil, false
	}
	hi_tok, hi_ok := next_token(t)
	if !hi_ok {
		attr_value_destroy(&lo_val)
		return nil, false
	}
	hi_val, hi_val_ok := resolve_attribute_value(hi_tok, values)
	if !hi_val_ok {
		attr_value_destroy(&lo_val)
		return nil, false
	}
	node := new(Filter_Node)
	node.type = .Between
	node.path = path
	node.value = lo_val
	node.value2 = hi_val
	return node, true
}
// path IN (:v1, :v2, ...) — called with "IN" already consumed. Requires at
// least one value (an empty list fails when ")" is fed to
// resolve_attribute_value). Every failure path destroys the values gathered
// so far. On success the accumulated values become the node's in_values
// slice, owned by the node and released by filter_node_destroy.
parse_filter_in :: proc(
	t: ^Tokenizer,
	path: string,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (^Filter_Node, bool) {
	lparen, lp_ok := next_token(t)
	if !lp_ok || lparen != "(" {
		return nil, false
	}
	in_vals := make([dynamic]Attribute_Value)
	for {
		val_tok, vt_ok := next_token(t)
		if !vt_ok {
			for &v in in_vals {
				attr_value_destroy(&v)
			}
			delete(in_vals)
			return nil, false
		}
		val, val_ok := resolve_attribute_value(val_tok, values)
		if !val_ok {
			for &v in in_vals {
				attr_value_destroy(&v)
			}
			delete(in_vals)
			return nil, false
		}
		append(&in_vals, val)
		// After each value: ")" ends the list, "," continues it,
		// anything else is a syntax error.
		sep_tok, sep_ok := next_token(t)
		if !sep_ok {
			for &v in in_vals {
				attr_value_destroy(&v)
			}
			delete(in_vals)
			return nil, false
		}
		if sep_tok == ")" {
			break
		}
		if sep_tok != "," {
			for &v in in_vals {
				attr_value_destroy(&v)
			}
			delete(in_vals)
			return nil, false
		}
	}
	node := new(Filter_Node)
	node.type = .In
	node.path = path
	node.in_values = in_vals[:]
	return node, true
}
// ============================================================================
// Filter Expression Evaluation
// ============================================================================
// Evaluate a parsed filter tree against a single item.
// A nil node means "no filter" and matches everything. A missing attribute
// makes every leaf predicate false, except Attribute_Not_Exists which is
// true exactly when the path is absent.
evaluate_filter :: proc(item: Item, node: ^Filter_Node) -> bool {
	if node == nil {
		return true
	}
	switch node.type {
	case .Comparison:
		attr, found := item[node.path]
		if !found {
			return false
		}
		return evaluate_comparison(attr, node.comp_op, node.value)
	case .Between:
		attr, found := item[node.path]
		if !found {
			return false
		}
		lo_cmp := compare_attribute_values(attr, node.value)
		if v2, ok := node.value2.?; ok {
			hi_cmp := compare_attribute_values(attr, v2)
			// Fix: -2 is compare_attribute_values' "not comparable"
			// sentinel. Previously hi_cmp == -2 slipped through the
			// `<= 0` check, so a type-mismatched upper bound could
			// spuriously satisfy BETWEEN. Incomparable operands never
			// match.
			if lo_cmp == -2 || hi_cmp == -2 {
				return false
			}
			return lo_cmp >= 0 && hi_cmp <= 0
		}
		return false // malformed node: BETWEEN without an upper bound
	case .In:
		attr, found := item[node.path]
		if !found {
			return false
		}
		for in_val in node.in_values {
			if compare_attribute_values(attr, in_val) == 0 {
				return true
			}
		}
		return false
	case .Begins_With:
		attr, found := item[node.path]
		if !found {
			return false
		}
		attr_str, attr_ok := attr_value_to_string_for_compare(attr)
		val_str, val_ok := attr_value_to_string_for_compare(node.value)
		if !attr_ok || !val_ok {
			return false
		}
		return strings.has_prefix(attr_str, val_str)
	case .Contains:
		attr, found := item[node.path]
		if !found {
			return false
		}
		return evaluate_contains(attr, node.value)
	case .Attribute_Exists:
		_, found := item[node.path]
		return found
	case .Attribute_Not_Exists:
		_, found := item[node.path]
		return !found
	case .And:
		return evaluate_filter(item, node.left) && evaluate_filter(item, node.right)
	case .Or:
		return evaluate_filter(item, node.left) || evaluate_filter(item, node.right)
	case .Not:
		return !evaluate_filter(item, node.child)
	}
	return false
}
// Apply a comparison operator to the result of compare_attribute_values.
// That helper returns the sentinel -2 for operand pairs with no defined
// ordering (e.g. mismatched Bool). Fix: -2 previously satisfied `<` and
// `<=` because -2 < 0; incomparable operands must not satisfy the ordering
// operators. NE deliberately keeps treating "not comparable" as "not equal"
// so an unequal Bool pair (which compares as -2) still matches `<>`.
evaluate_comparison :: proc(attr: Attribute_Value, op: Comparison_Op, val: Attribute_Value) -> bool {
	cmp := compare_attribute_values(attr, val)
	switch op {
	case .EQ: return cmp == 0
	case .NE: return cmp != 0
	case .LT: return cmp != -2 && cmp < 0
	case .LE: return cmp != -2 && cmp <= 0
	case .GT: return cmp > 0
	case .GE: return cmp >= 0
	}
	return false
}
// contains(path, value) semantics per attribute type, as implemented below:
//   String     — substring test against a String operand
//   String_Set — membership test for a String operand
//   Number_Set — membership test for a Number operand; compared by string
//                form, so "1" and "1.0" do NOT match
//   List       — element-wise equality via compare_attribute_values
// Any other attribute/operand combination evaluates to false.
evaluate_contains :: proc(attr: Attribute_Value, val: Attribute_Value) -> bool {
	// For strings: substring check
	#partial switch a in attr {
	case String:
		if v, ok := val.(String); ok {
			return strings.contains(string(a), string(v))
		}
	case String_Set:
		if v, ok := val.(String); ok {
			for s in a {
				if s == string(v) {
					return true
				}
			}
		}
	case Number_Set:
		if v, ok := val.(Number); ok {
			for n in a {
				if n == string(v) {
					return true
				}
			}
		}
	case List:
		for item in a {
			if compare_attribute_values(item, val) == 0 {
				return true
			}
		}
	}
	return false
}
// Compare two AttributeValues. Returns <0, 0, or >0 for ordered operands and
// the sentinel -2 when the pair is not comparable — callers must treat -2 as
// "no ordering", never as "less than".
// Behavior established by the code below:
//   - Two Numbers compare numerically when both parse as f64; otherwise they
//     fall through to lexicographic comparison of their string forms.
//   - Two Bools: equal → 0; unequal → -2 (no ordering defined for Bool).
//   - Mixed pairs that both stringify (e.g. String vs Number) compare
//     lexicographically. NOTE(review): DynamoDB treats mismatched types as
//     incomparable — confirm this looseness is intended.
compare_attribute_values :: proc(a: Attribute_Value, b: Attribute_Value) -> int {
	a_str, a_ok := attr_value_to_string_for_compare(a)
	b_str, b_ok := attr_value_to_string_for_compare(b)
	if !a_ok || !b_ok {
		// Not string-convertible: try bool comparison before giving up.
		a_bool, a_is_bool := a.(Bool)
		b_bool, b_is_bool := b.(Bool)
		if a_is_bool && b_is_bool {
			if bool(a_bool) == bool(b_bool) {
				return 0
			}
			return -2
		}
		return -2
	}
	// For Numbers, do numeric comparison
	_, a_is_num := a.(Number)
	_, b_is_num := b.(Number)
	if a_is_num && b_is_num {
		a_val, a_parse := strconv.parse_f64(a_str)
		b_val, b_parse := strconv.parse_f64(b_str)
		if a_parse && b_parse {
			if a_val < b_val {
				return -1
			}
			if a_val > b_val {
				return 1
			}
			return 0
		}
	}
	return strings.compare(a_str, b_str)
}
// ============================================================================
// Request parsing helpers for FilterExpression
// ============================================================================
// Extract the raw "FilterExpression" string from a request body.
// Returns ok=false when the field is absent or not a string; absence is the
// normal "no filter" case.
// NOTE(review): the returned string is a view into the JSON document parsed
// with context.temp_allocator and destroyed by the defer below — it is only
// safe while the temp allocator's memory survives. Confirm callers consume
// it within the same request/temp scope.
parse_filter_expression_string :: proc(request_body: []byte) -> (expr: string, ok: bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return
	}
	defer json.destroy_value(data)
	root, root_ok := data.(json.Object)
	if !root_ok {
		return
	}
	fe_val, found := root["FilterExpression"]
	if !found {
		return
	}
	fe_str, str_ok := fe_val.(json.String)
	if !str_ok {
		return
	}
	expr = string(fe_str)
	ok = true
	return
}

View File

@@ -1047,7 +1047,7 @@ evaluate_sort_key_condition :: proc(item: Item, skc: ^Sort_Key_Condition) -> boo
}
// Extract a comparable string from a scalar AttributeValue
@(private = "file")
// Package-visible: used by filter.odin for comparisons
attr_value_to_string_for_compare :: proc(attr: Attribute_Value) -> (string, bool) {
#partial switch v in attr {
case String:

935
dynamodb/update.odin Normal file
View File

@@ -0,0 +1,935 @@
// UpdateExpression Parser and Executor
// Supports: SET path = value [, path = value ...]
// REMOVE path [, path ...]
// ADD path value [, path value ...] (numeric add / set add)
// DELETE path value [, path value ...] (set remove)
//
// Values can be:
// :placeholder → resolved from ExpressionAttributeValues
// path + :placeholder → numeric addition
// path - :placeholder → numeric subtraction
// if_not_exists(path, :placeholder) → default value
// list_append(operand, operand) → list concatenation
package dynamodb
import "core:encoding/json"
import "core:fmt"
import "core:strconv"
import "core:strings"
// ============================================================================
// Update Plan — parsed representation of an UpdateExpression
// ============================================================================
// Kinds of top-level clause in an UpdateExpression. (Informational — the
// plan below stores each clause's actions in its own typed list.)
Update_Action_Type :: enum {
	SET,
	REMOVE,
	ADD,
	DELETE,
}
// How a SET action computes its new value.
Set_Value_Kind :: enum {
	Direct, // SET x = :val
	Plus, // SET x = x + :val or SET x = :val + x
	Minus, // SET x = x - :val
	If_Not_Exists, // SET x = if_not_exists(x, :val)
	List_Append, // SET x = list_append(x, :val)
}
// One SET entry. `source` names the attribute read by the Plus/Minus/
// If_Not_Exists/List_Append forms; it is unused for Direct.
Set_Action :: struct {
	path: string,
	value_kind: Set_Value_Kind,
	value: Attribute_Value, // primary value (owned; freed by update_plan_destroy)
	source: string, // source path for Plus/Minus/If_Not_Exists/List_Append
	value2: Maybe(Attribute_Value), // second operand for list_append where both are values
}
// One REMOVE entry: delete the attribute at `path`.
Remove_Action :: struct {
	path: string,
}
// One ADD entry: numeric add / set add of `value` at `path`.
Add_Action :: struct {
	path: string,
	value: Attribute_Value,
}
// One DELETE entry: set-remove of `value` from the set at `path`.
Delete_Action :: struct {
	path: string,
	value: Attribute_Value,
}
// Fully parsed UpdateExpression: the actions of each clause kind, in source
// order within each list. Release with update_plan_destroy.
Update_Plan :: struct {
	sets: [dynamic]Set_Action,
	removes: [dynamic]Remove_Action,
	adds: [dynamic]Add_Action,
	deletes: [dynamic]Delete_Action,
}
// Free every Attribute_Value owned by the plan's actions, then the four
// action arrays themselves. Path/source strings are not freed here —
// NOTE(review): they appear to come straight from resolve_attribute_name;
// confirm no clause parser ever clones them.
update_plan_destroy :: proc(plan: ^Update_Plan) {
	for &s in plan.sets {
		attr_value_destroy(&s.value)
		if v2, ok := s.value2.?; ok {
			// Unwrapping the Maybe copies the payload; destroy via a local.
			v2_copy := v2
			attr_value_destroy(&v2_copy)
		}
	}
	delete(plan.sets)
	delete(plan.removes) // Remove_Action holds only a path string
	for &a in plan.adds {
		attr_value_destroy(&a.value)
	}
	delete(plan.adds)
	for &d in plan.deletes {
		attr_value_destroy(&d.value)
	}
	delete(plan.deletes)
}
// ============================================================================
// Parse UpdateExpression
//
// Grammar (simplified):
// update_expr = clause { clause }
// clause = "SET" set_list | "REMOVE" remove_list | "ADD" add_list | "DELETE" delete_list
// set_list = set_entry { "," set_entry }
// set_entry = path "=" value_expr
// value_expr = :placeholder
// | path "+" :placeholder
// | path "-" :placeholder
// | "if_not_exists" "(" path "," :placeholder ")"
// | "list_append" "(" operand "," operand ")"
// remove_list = path { "," path }
// add_list = add_entry { "," add_entry }
// add_entry = path :placeholder
// delete_list = delete_entry { "," delete_entry }
// delete_entry= path :placeholder
// ============================================================================
// Parse a full UpdateExpression into an Update_Plan.
// Loops over top-level clause keywords (SET/REMOVE/ADD/DELETE, matched
// case-insensitively) and dispatches to the matching clause parser. Any
// unknown keyword or clause-level failure destroys the partial plan and
// returns ({}, false). On success the caller owns the plan and releases it
// with update_plan_destroy.
parse_update_expression :: proc(
	expression: string,
	attribute_names: Maybe(map[string]string),
	attribute_values: map[string]Attribute_Value,
) -> (plan: Update_Plan, ok: bool) {
	plan.sets = make([dynamic]Set_Action)
	plan.removes = make([dynamic]Remove_Action)
	plan.adds = make([dynamic]Add_Action)
	plan.deletes = make([dynamic]Delete_Action)
	t := tokenizer_init(expression)
	for {
		keyword_maybe := tokenizer_next(&t)
		keyword_str, has_keyword := keyword_maybe.?
		if !has_keyword {
			break // done
		}
		if strings.equal_fold(keyword_str, "SET") {
			if !parse_set_clause(&t, &plan, attribute_names, attribute_values) {
				update_plan_destroy(&plan)
				return {}, false
			}
		} else if strings.equal_fold(keyword_str, "REMOVE") {
			if !parse_remove_clause(&t, &plan, attribute_names) {
				update_plan_destroy(&plan)
				return {}, false
			}
		} else if strings.equal_fold(keyword_str, "ADD") {
			if !parse_add_clause(&t, &plan, attribute_names, attribute_values) {
				update_plan_destroy(&plan)
				return {}, false
			}
		} else if strings.equal_fold(keyword_str, "DELETE") {
			if !parse_delete_clause(&t, &plan, attribute_names, attribute_values) {
				update_plan_destroy(&plan)
				return {}, false
			}
		} else {
			// Token at clause position is not a clause keyword.
			update_plan_destroy(&plan)
			return {}, false
		}
	}
	return plan, true
}
// ============================================================================
// SET clause parsing
// ============================================================================
// SET clause: set_entry { "," set_entry }, where set_entry = path "=" value_expr.
// Reads entries until the next clause keyword or a non-comma token (both
// rewound for the caller) or end of input. Returns false on any malformed
// entry; actions already appended are cleaned up by the caller via
// update_plan_destroy.
parse_set_clause :: proc(
	t: ^Tokenizer,
	plan: ^Update_Plan,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> bool {
	saved_pos: int
	for {
		// Save position before reading so we can rewind if it's a clause keyword
		saved_pos = t.pos
		// Path
		path_tok, path_ok := next_token(t)
		if !path_ok {
			return false
		}
		// Check if this is actually a new clause keyword (SET/REMOVE/ADD/DELETE)
		if is_clause_keyword(path_tok) {
			t.pos = saved_pos
			return true
		}
		path, path_resolved := resolve_attribute_name(path_tok, names)
		if !path_resolved {
			return false
		}
		// "="
		eq_tok, eq_ok := next_token(t)
		if !eq_ok || eq_tok != "=" {
			return false
		}
		// Value expression
		action, act_ok := parse_set_value_expr(t, path, names, values)
		if !act_ok {
			return false
		}
		append(&plan.sets, action)
		// Check for comma (more entries) or end
		saved_pos = t.pos
		comma_maybe := tokenizer_next(t)
		if comma, has := comma_maybe.?; has {
			if comma == "," {
				continue
			}
			// Not a comma — put it back
			t.pos = saved_pos
		}
		break
	}
	return true
}
// Parse the right-hand side of a SET entry. Accepted forms:
//   :val                       → Direct
//   :val + path / :val - path  → Plus / Minus (value first)
//   path + :val / path - :val  → Plus / Minus (path first)
//   if_not_exists(path, :val)  → If_Not_Exists
//   list_append(a, b)          → List_Append
// `path` is the (already resolved) target attribute of the SET entry.
// A bare path with no operator (SET a = b) is rejected — see the final
// branch below.
parse_set_value_expr :: proc(
	t: ^Tokenizer,
	path: string,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (action: Set_Action, ok: bool) {
	first_tok, first_ok := next_token(t)
	if !first_ok {
		return {}, false
	}
	// Check for if_not_exists(...)
	if strings.equal_fold(first_tok, "if_not_exists") {
		action, ok = parse_if_not_exists(t, path, names, values)
		return
	}
	// Check for list_append(...)
	if strings.equal_fold(first_tok, "list_append") {
		action, ok = parse_list_append(t, path, names, values)
		return
	}
	peek_pos: int
	// Check if first token is a :placeholder (direct value)
	if len(first_tok) > 0 && first_tok[0] == ':' {
		// Could be :val + path or :val - path or just :val
		peek_pos = t.pos
		op_maybe := tokenizer_next(t)
		if op, has_op := op_maybe.?; has_op && (op == "+" || op == "-") {
			// :val op path
			second_tok, sec_ok := next_token(t)
			if !sec_ok {
				return {}, false
			}
			source, source_resolved := resolve_attribute_name(second_tok, names)
			if !source_resolved {
				return {}, false
			}
			val, val_ok := resolve_attribute_value(first_tok, values)
			if !val_ok {
				return {}, false
			}
			kind := Set_Value_Kind.Plus if op == "+" else Set_Value_Kind.Minus
			return Set_Action{
				path = path,
				value_kind = kind,
				value = val,
				source = source,
			}, true
		}
		// Just a direct value — rewind the operator lookahead
		t.pos = peek_pos
		val, val_ok := resolve_attribute_value(first_tok, values)
		if !val_ok {
			return {}, false
		}
		return Set_Action{
			path = path,
			value_kind = .Direct,
			value = val,
		}, true
	}
	// First token is a path — check for path + :val or path - :val
	source, source_resolved := resolve_attribute_name(first_tok, names)
	if !source_resolved {
		return {}, false
	}
	peek_pos = t.pos
	op_maybe := tokenizer_next(t)
	if op, has_op := op_maybe.?; has_op && (op == "+" || op == "-") {
		val_tok, vt_ok := next_token(t)
		if !vt_ok {
			return {}, false
		}
		val, val_ok := resolve_attribute_value(val_tok, values)
		if !val_ok {
			return {}, false
		}
		kind := Set_Value_Kind.Plus if op == "+" else Set_Value_Kind.Minus
		return Set_Action{
			path = path,
			value_kind = kind,
			value = val,
			source = source,
		}, true
	}
	// Bare path with no "+"/"-" operator (e.g. SET a = b) is not supported —
	// rewind the lookahead and report failure.
	t.pos = peek_pos
	return {}, false
}
// if_not_exists(source_path, :default) — called with the keyword already
// consumed. Produces a Set_Action that applies :default only when
// source_path is absent (enforced by the plan executor, not here).
parse_if_not_exists :: proc(
	t: ^Tokenizer,
	path: string,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (action: Set_Action, ok: bool) {
	if tok, tok_ok := next_token(t); !tok_ok || tok != "(" {
		return {}, false
	}
	raw_src, have_src := next_token(t)
	if !have_src {
		return {}, false
	}
	src_path, src_resolved := resolve_attribute_name(raw_src, names)
	if !src_resolved {
		return {}, false
	}
	if tok, tok_ok := next_token(t); !tok_ok || tok != "," {
		return {}, false
	}
	raw_val, have_val := next_token(t)
	if !have_val {
		return {}, false
	}
	default_val, val_resolved := resolve_attribute_value(raw_val, values)
	if !val_resolved {
		return {}, false
	}
	if tok, tok_ok := next_token(t); !tok_ok || tok != ")" {
		attr_value_destroy(&default_val) // avoid leaking the resolved default
		return {}, false
	}
	action = Set_Action{
		path = path,
		value_kind = .If_Not_Exists,
		value = default_val,
		source = src_path,
	}
	return action, true
}
// list_append(a, b) — called with the keyword already consumed. One operand
// must be a :value placeholder and the other an attribute path; both orders
// are accepted. Produces a List_Append Set_Action whose `source` is the path
// operand and `value` the resolved placeholder.
// Fix: the resolved value is now destroyed when the path operand fails to
// resolve (previously it leaked on that error path).
parse_list_append :: proc(
	t: ^Tokenizer,
	path: string,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> (action: Set_Action, ok: bool) {
	lparen, lp_ok := next_token(t)
	if !lp_ok || lparen != "(" {
		return {}, false
	}
	// First operand — could be :val or path
	first_tok, first_ok := next_token(t)
	if !first_ok {
		return {}, false
	}
	comma, comma_ok := next_token(t)
	if !comma_ok || comma != "," {
		return {}, false
	}
	// Second operand
	second_tok, second_ok := next_token(t)
	if !second_ok {
		return {}, false
	}
	rparen, rp_ok := next_token(t)
	if !rp_ok || rparen != ")" {
		return {}, false
	}
	// Determine which operand is the path and which is the value.
	// Common patterns: list_append(path, :val) or list_append(:val, path)
	source: string
	val: Attribute_Value
	resolved: bool
	if len(first_tok) > 0 && first_tok[0] == ':' {
		// list_append(:val, path)
		v, v_ok := resolve_attribute_value(first_tok, values)
		if !v_ok {
			return {}, false
		}
		val = v
		source, resolved = resolve_attribute_name(second_tok, names)
		if !resolved {
			attr_value_destroy(&val) // leak fix: release the resolved value
			return {}, false
		}
	} else if len(second_tok) > 0 && second_tok[0] == ':' {
		// list_append(path, :val)
		source, resolved = resolve_attribute_name(first_tok, names)
		if !resolved {
			return {}, false
		}
		v, v_ok := resolve_attribute_value(second_tok, values)
		if !v_ok {
			return {}, false
		}
		val = v
	} else {
		// Neither operand is a value placeholder (e.g. both paths) —
		// unsupported form.
		return {}, false
	}
	return Set_Action{
		path = path,
		value_kind = .List_Append,
		value = val,
		source = source,
	}, true
}
// ============================================================================
// REMOVE clause parsing
// ============================================================================
// REMOVE clause: path { "," path }. Stops (rewinding the tokenizer) at the
// next clause keyword or at any non-comma separator; appends one
// Remove_Action per resolved path.
parse_remove_clause :: proc(
	t: ^Tokenizer,
	plan: ^Update_Plan,
	names: Maybe(map[string]string),
) -> bool {
	for {
		mark := t.pos
		raw_path, have_path := next_token(t)
		if !have_path {
			return false
		}
		if is_clause_keyword(raw_path) {
			// Start of the next clause — rewind and hand control back.
			t.pos = mark
			return true
		}
		attr_path, resolved := resolve_attribute_name(raw_path, names)
		if !resolved {
			return false
		}
		append(&plan.removes, Remove_Action{path = attr_path})
		// A comma continues the list; anything else (or end of input)
		// ends the clause, with non-comma tokens pushed back.
		mark = t.pos
		sep, has_sep := tokenizer_next(t).?
		if !has_sep {
			return true
		}
		if sep != "," {
			t.pos = mark
			return true
		}
	}
}
// ============================================================================
// ADD clause parsing
// ============================================================================
// ADD clause: path :value { "," path :value }. This only parses; the
// numeric-add / set-add semantics are applied later by the plan executor.
parse_add_clause :: proc(
	t: ^Tokenizer,
	plan: ^Update_Plan,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> bool {
	for {
		mark := t.pos
		raw_path, have_path := next_token(t)
		if !have_path {
			return false
		}
		if is_clause_keyword(raw_path) {
			// Start of the next clause — rewind and hand control back.
			t.pos = mark
			return true
		}
		attr_path, name_resolved := resolve_attribute_name(raw_path, names)
		if !name_resolved {
			return false
		}
		raw_val, have_val := next_token(t)
		if !have_val {
			return false
		}
		val, val_resolved := resolve_attribute_value(raw_val, values)
		if !val_resolved {
			return false
		}
		append(&plan.adds, Add_Action{path = attr_path, value = val})
		// A comma continues the list; anything else ends the clause.
		mark = t.pos
		sep, has_sep := tokenizer_next(t).?
		if !has_sep {
			return true
		}
		if sep != "," {
			t.pos = mark
			return true
		}
	}
}
// ============================================================================
// DELETE clause parsing
// ============================================================================
// DELETE clause: path :value { "," path :value }. This only parses; the
// set-remove semantics are applied later by the plan executor.
parse_delete_clause :: proc(
	t: ^Tokenizer,
	plan: ^Update_Plan,
	names: Maybe(map[string]string),
	values: map[string]Attribute_Value,
) -> bool {
	for {
		mark := t.pos
		raw_path, have_path := next_token(t)
		if !have_path {
			return false
		}
		if is_clause_keyword(raw_path) {
			// Start of the next clause — rewind and hand control back.
			t.pos = mark
			return true
		}
		attr_path, name_resolved := resolve_attribute_name(raw_path, names)
		if !name_resolved {
			return false
		}
		raw_val, have_val := next_token(t)
		if !have_val {
			return false
		}
		val, val_resolved := resolve_attribute_value(raw_val, values)
		if !val_resolved {
			return false
		}
		append(&plan.deletes, Delete_Action{path = attr_path, value = val})
		// A comma continues the list; anything else ends the clause.
		mark = t.pos
		sep, has_sep := tokenizer_next(t).?
		if !has_sep {
			return true
		}
		if sep != "," {
			t.pos = mark
			return true
		}
	}
}
// ============================================================================
// Helpers
// ============================================================================
// Report whether a token is one of the four UpdateExpression clause keywords
// (case-insensitive, matching DynamoDB's parser).
is_clause_keyword :: proc(tok: string) -> bool {
	keywords := [?]string{"SET", "REMOVE", "ADD", "DELETE"}
	for kw in keywords {
		if strings.equal_fold(tok, kw) {
			return true
		}
	}
	return false
}
// ============================================================================
// Execute Update Plan — apply mutations to an Item (in-place)
// ============================================================================

// Destroy and remove the attribute at `path`, if present. No-op otherwise.
@(private = "file")
item_remove_attr :: proc(item: ^Item, path: string) {
	if old, found := item[path]; found {
		old_copy := old
		attr_value_destroy(&old_copy)
		delete_key(item, path)
	}
}

// Replace the attribute at `path` with `value`, destroying any previous value.
// Takes ownership of `value`; the map key is a fresh clone of `path`.
@(private = "file")
item_set_attr :: proc(item: ^Item, path: string, value: Attribute_Value) {
	item_remove_attr(item, path)
	item[strings.clone(path)] = value
}

// Apply all SET / REMOVE / ADD / DELETE actions of `plan` to `item` in place.
//
// Returns false on a type/operand error (arithmetic on a non-number, missing
// source attribute, ADD/DELETE operand that doesn't match the target type).
// On failure the item may be partially mutated — callers discard it rather
// than persisting it.
//
// Refactor note: the repeated destroy-old / delete_key / clone-key-insert
// sequence is centralized in item_set_attr / item_remove_attr.
execute_update_plan :: proc(item: ^Item, plan: ^Update_Plan) -> bool {
	// Execute SET actions
	for &action in plan.sets {
		switch action.value_kind {
		case .Direct:
			item_set_attr(item, action.path, attr_value_deep_copy(action.value))
		case .Plus:
			// path = source + value (numeric addition)
			existing, found := item[action.source]
			if !found {
				return false // source attribute not found
			}
			result := numeric_add(existing, action.value) or_return
			item_set_attr(item, action.path, result)
		case .Minus:
			// path = source - value (numeric subtraction)
			existing, found := item[action.source]
			if !found {
				return false
			}
			result := numeric_subtract(existing, action.value) or_return
			item_set_attr(item, action.path, result)
		case .If_Not_Exists:
			// Only set when the source attribute is absent; otherwise keep
			// the current value untouched.
			if _, found := item[action.source]; !found {
				item_set_attr(item, action.path, attr_value_deep_copy(action.value))
			}
		case .List_Append:
			// path = list_append(source, value); a missing source behaves as
			// an empty list, but a non-list source or operand is an error.
			existing_list: []Attribute_Value
			if src, found := item[action.source]; found {
				src_as_list, src_is_list := src.(List)
				if !src_is_list {
					return false
				}
				existing_list = ([]Attribute_Value)(src_as_list)
			}
			operand, operand_is_list := action.value.(List)
			if !operand_is_list {
				return false
			}
			append_list := ([]Attribute_Value)(operand)
			// Deep-copy both halves before any destroy so no element aliases
			// memory we are about to free.
			new_list := make([]Attribute_Value, len(existing_list) + len(append_list))
			for item_val, i in existing_list {
				new_list[i] = attr_value_deep_copy(item_val)
			}
			for item_val, i in append_list {
				new_list[len(existing_list) + i] = attr_value_deep_copy(item_val)
			}
			item_set_attr(item, action.path, List(new_list))
		}
	}
	// Execute REMOVE actions
	for &action in plan.removes {
		item_remove_attr(item, action.path)
	}
	// Execute ADD actions (numeric increment, or set union)
	for &action in plan.adds {
		existing, found := item[action.path]
		if !found {
			// Attribute doesn't exist — create it from the operand.
			item_set_attr(item, action.path, attr_value_deep_copy(action.value))
			continue
		}
		#partial switch v in existing {
		case Number:
			result := numeric_add(existing, action.value) or_return
			item_set_attr(item, action.path, result)
		case String_Set:
			new_ss, is_ss := action.value.(String_Set)
			if !is_ss {
				return false
			}
			merged := set_union_strings(([]string)(v), ([]string)(new_ss))
			item_set_attr(item, action.path, String_Set(merged))
		case Number_Set:
			new_ns, is_ns := action.value.(Number_Set)
			if !is_ns {
				return false
			}
			merged := set_union_strings(([]string)(v), ([]string)(new_ns))
			item_set_attr(item, action.path, Number_Set(merged))
		case:
			// ADD against any other attribute type is a validation error.
			return false
		}
	}
	// Execute DELETE actions (remove elements from sets)
	for &action in plan.deletes {
		existing, found := item[action.path]
		if !found {
			continue
		}
		#partial switch v in existing {
		case String_Set:
			if del_ss, is_ss := action.value.(String_Set); is_ss {
				result := set_difference_strings(([]string)(v), ([]string)(del_ss))
				if len(result) > 0 {
					item_set_attr(item, action.path, String_Set(result))
				} else {
					// All elements removed — drop the attribute entirely.
					item_remove_attr(item, action.path)
					delete(result)
				}
			}
		case Number_Set:
			if del_ns, is_ns := action.value.(Number_Set); is_ns {
				result := set_difference_strings(([]string)(v), ([]string)(del_ns))
				if len(result) > 0 {
					item_set_attr(item, action.path, Number_Set(result))
				} else {
					item_remove_attr(item, action.path)
					delete(result)
				}
			}
		case:
			// DELETE on a non-set type is a no-op in DynamoDB.
		}
	}
	return true
}
// ============================================================================
// Numeric helpers
// ============================================================================
// a + b for two Number attribute values.
// Fails when either operand is not a Number or does not parse as f64.
// NOTE(review): arithmetic is done in f64 — very large or high-precision
// DynamoDB decimals may lose precision; confirm that is acceptable.
numeric_add :: proc(a: Attribute_Value, b: Attribute_Value) -> (Attribute_Value, bool) {
	lhs, lhs_is_num := a.(Number)
	rhs, rhs_is_num := b.(Number)
	if !lhs_is_num || !rhs_is_num {
		return nil, false
	}
	x, x_ok := strconv.parse_f64(string(lhs))
	y, y_ok := strconv.parse_f64(string(rhs))
	if !x_ok || !y_ok {
		return nil, false
	}
	return Number(format_number(x + y)), true
}
// a - b for two Number attribute values.
// Fails when either operand is not a Number or does not parse as f64.
numeric_subtract :: proc(a: Attribute_Value, b: Attribute_Value) -> (Attribute_Value, bool) {
	lhs, lhs_is_num := a.(Number)
	rhs, rhs_is_num := b.(Number)
	if !lhs_is_num || !rhs_is_num {
		return nil, false
	}
	x, x_ok := strconv.parse_f64(string(lhs))
	y, y_ok := strconv.parse_f64(string(rhs))
	if !x_ok || !y_ok {
		return nil, false
	}
	return Number(format_number(x - y)), true
}
// Format an f64 as a DynamoDB number string.
// Integral values are rendered without a decimal point ("3", not "3.0");
// everything else falls back to %g.
//
// Fix: guard the f64 -> i64 cast. Casting a value outside the i64 range
// (or NaN / Inf) is not well-defined, so only take the integer path when
// the value lies within [-2^63, 2^63). NaN fails both comparisons and
// falls through to %g.
format_number :: proc(val: f64) -> string {
	if val >= -9223372036854775808.0 && val < 9223372036854775808.0 {
		int_val := i64(val)
		if f64(int_val) == val {
			return fmt.aprintf("%d", int_val)
		}
	}
	return fmt.aprintf("%g", val)
}
// ============================================================================
// Set helpers
// ============================================================================
// Union of two string sets. Elements are cloned; duplicates appear once.
// Result order follows first occurrence in (a, b); sets are unordered, so
// callers must not rely on any particular order.
set_union_strings :: proc(a: []string, b: []string) -> []string {
	seen := make(map[string]bool, allocator = context.temp_allocator)
	out := make([dynamic]string, 0, len(a) + len(b))
	for s in a {
		if s not_in seen {
			seen[s] = true
			append(&out, strings.clone(s))
		}
	}
	for s in b {
		if s not_in seen {
			seen[s] = true
			append(&out, strings.clone(s))
		}
	}
	return out[:]
}
// a \ b for string sets: every element of `a` not present in `b`, cloned,
// preserving a's order.
set_difference_strings :: proc(a: []string, b: []string) -> []string {
	remove_set := make(map[string]bool, allocator = context.temp_allocator)
	for s in b {
		remove_set[s] = true
	}
	kept := make([dynamic]string, 0, len(a))
	for s in a {
		if s not_in remove_set {
			append(&kept, strings.clone(s))
		}
	}
	return kept[:]
}
// ============================================================================
// Request Parsing Helper
// ============================================================================
// Extract the raw "UpdateExpression" string from a JSON request body.
//
// Returns ok=false when the body is not valid JSON, the root is not an
// object, or there is no string-valued "UpdateExpression" field.
//
// NOTE(review): the returned `expr` borrows memory owned by the parsed JSON
// value, which the deferred json.destroy_value releases before this proc
// returns. This only works because parsing uses context.temp_allocator
// (frees are deferred to the arena reset) — callers must consume `expr`
// before the temp allocator is reset. Confirm this lifetime assumption
// holds at every call site.
parse_update_expression_string :: proc(request_body: []byte) -> (expr: string, ok: bool) {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return
	}
	defer json.destroy_value(data)
	root, root_ok := data.(json.Object)
	if !root_ok {
		return
	}
	ue_val, found := root["UpdateExpression"]
	if !found {
		return
	}
	ue_str, str_ok := ue_val.(json.String)
	if !str_ok {
		return
	}
	expr = string(ue_str)
	ok = true
	return
}
// Parse ReturnValues from request body.
//
// Returns one of the canonical DynamoDB values ("NONE", "ALL_OLD", "ALL_NEW",
// "UPDATED_OLD", "UPDATED_NEW"); anything missing, malformed, or unrecognized
// degrades to "NONE" — which matches how callers already treat unknown values.
//
// Fix: the previous version returned string(rv_str), a view into the parsed
// JSON value that the deferred json.destroy_value releases — a borrowed
// string that only survived by accident of the temp allocator. Returning
// static literals makes the result's lifetime unconditional.
parse_return_values :: proc(request_body: []byte) -> string {
	data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
	if parse_err != nil {
		return "NONE"
	}
	defer json.destroy_value(data)
	root, root_ok := data.(json.Object)
	if !root_ok {
		return "NONE"
	}
	rv_val, found := root["ReturnValues"]
	if !found {
		return "NONE"
	}
	rv_str, str_ok := rv_val.(json.String)
	if !str_ok {
		return "NONE"
	}
	// Map to a static literal so the caller never holds JSON-owned memory.
	switch string(rv_str) {
	case "ALL_OLD":
		return "ALL_OLD"
	case "ALL_NEW":
		return "ALL_NEW"
	case "UPDATED_OLD":
		return "UPDATED_OLD"
	case "UPDATED_NEW":
		return "UPDATED_NEW"
	}
	return "NONE"
}

125
dynamodb/update_item.odin Normal file
View File

@@ -0,0 +1,125 @@
// update_item.odin — Storage layer UpdateItem operation
// This file lives in the dynamodb/ package alongside storage.odin
package dynamodb
import "core:strings"
import "core:sync"
import "../rocksdb"
// UpdateItem — fetch existing item, apply update plan, write back
// Uses EXCLUSIVE lock (write operation)
//
// Upsert semantics: if no item exists at the key, a new item is seeded from
// the key attributes and the plan is applied to it (matching DynamoDB).
//
// Returns:
//   - old_item: the item BEFORE mutations (if it existed), for ReturnValues
//   - new_item: the item AFTER mutations
//   - error
//
// Ownership: the caller must item_destroy both old_item and new_item (when
// present). On any error return, all locally-created items are destroyed
// here and both Maybes are nil.
update_item :: proc(
	engine: ^Storage_Engine,
	table_name: string,
	key_item: Item,
	plan: ^Update_Plan,
) -> (old_item: Maybe(Item), new_item: Maybe(Item), err: Storage_Error) {
	// Exclusive table lock for the whole read-modify-write cycle.
	table_lock := get_or_create_table_lock(engine, table_name)
	sync.rw_mutex_lock(table_lock)
	defer sync.rw_mutex_unlock(table_lock)
	// Get table metadata
	metadata, meta_err := get_table_metadata(engine, table_name)
	if meta_err != .None {
		return nil, nil, meta_err
	}
	defer table_metadata_destroy(&metadata, engine.allocator)
	// Extract key from the provided key item
	key_struct, key_ok := key_from_item(key_item, metadata.key_schema)
	if !key_ok {
		return nil, nil, .Missing_Key_Attribute
	}
	defer key_destroy(&key_struct)
	// Get key values
	key_values, kv_ok := key_get_values(&key_struct)
	if !kv_ok {
		return nil, nil, .Invalid_Key
	}
	// Build storage key
	storage_key := build_data_key(table_name, key_values.pk, key_values.sk)
	defer delete(storage_key)
	// Fetch existing item (if any)
	existing_encoded, get_err := rocksdb.db_get(&engine.db, storage_key)
	existing_item: Item
	if get_err == .None && existing_encoded != nil {
		defer delete(existing_encoded)
		decoded, decode_ok := decode(existing_encoded)
		if !decode_ok {
			return nil, nil, .Serialization_Error
		}
		existing_item = decoded
		// Save old item for ReturnValues — deep copy taken BEFORE mutation,
		// since execute_update_plan mutates existing_item in place.
		old_item = item_deep_copy(existing_item)
	} else if get_err == .NotFound || existing_encoded == nil {
		// Item doesn't exist yet — start with just the key attributes
		existing_item = make(Item)
		for ks in metadata.key_schema {
			if val, found := key_item[ks.attribute_name]; found {
				existing_item[strings.clone(ks.attribute_name)] = attr_value_deep_copy(val)
			}
		}
	} else {
		return nil, nil, .RocksDB_Error
	}
	// Apply update plan
	if !execute_update_plan(&existing_item, plan) {
		item_destroy(&existing_item)
		if old, has := old_item.?; has {
			old_copy := old
			attr_value_destroy(&old_copy) // NOTE(review): see item_destroy below — confirm which destroy is intended
			item_destroy(&old_copy)
		}
		// NOTE(review): .Invalid_Key is a misleading error code for a failed
		// update expression — a dedicated Validation-style variant would map
		// better to DynamoDB's ValidationException.
		return nil, nil, .Invalid_Key
	}
	// Validate key attributes are still present and correct type
	// (an UpdateExpression must not REMOVE or retype a key attribute).
	validation_err := validate_item_key_types(
		existing_item, metadata.key_schema, metadata.attribute_definitions,
	)
	if validation_err != .None {
		item_destroy(&existing_item)
		if old, has := old_item.?; has {
			old_copy := old
			item_destroy(&old_copy)
		}
		return nil, nil, validation_err
	}
	// Encode updated item
	encoded_item, encode_ok := encode(existing_item)
	if !encode_ok {
		item_destroy(&existing_item)
		if old, has := old_item.?; has {
			old_copy := old
			item_destroy(&old_copy)
		}
		return nil, nil, .Serialization_Error
	}
	defer delete(encoded_item)
	// Write back to RocksDB
	put_err := rocksdb.db_put(&engine.db, storage_key, encoded_item)
	if put_err != .None {
		item_destroy(&existing_item)
		if old, has := old_item.?; has {
			old_copy := old
			item_destroy(&old_copy)
		}
		return nil, nil, .RocksDB_Error
	}
	// Success: hand ownership of both items to the caller.
	new_item = existing_item
	return old_item, new_item, .None
}

328
main.odin
View File

@@ -362,18 +362,136 @@ handle_delete_item :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Requ
response_set_body(response, transmute([]byte)string("{}"))
}
// UpdateItem — parse UpdateExpression + expression attributes, apply the
// update plan through the storage layer, and honor ReturnValues.
//
// Fix: removed leftover pre-implementation stub lines (the old
// "UpdateItem is not yet supported" error path) that were interleaved with
// the real implementation, and collapsed the four duplicated ReturnValues
// response branches into a single serialization path.
handle_update_item :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, response: ^HTTP_Response) {
	// Parse TableName
	table_name, ok := dynamodb.parse_table_name(request.body)
	if !ok {
		make_error_response(response, .ValidationException, "Invalid request or missing TableName")
		return
	}
	// Parse Key
	key_item, key_ok := dynamodb.parse_key_from_request(request.body)
	if !key_ok {
		make_error_response(response, .ValidationException, "Invalid or missing Key")
		return
	}
	defer dynamodb.item_destroy(&key_item)
	// Parse UpdateExpression
	update_expr, ue_ok := dynamodb.parse_update_expression_string(request.body)
	if !ue_ok {
		make_error_response(response, .ValidationException, "Missing or invalid UpdateExpression")
		return
	}
	// Parse ExpressionAttributeNames and ExpressionAttributeValues
	attr_names := dynamodb.parse_expression_attribute_names(request.body)
	defer {
		if names, has_names := attr_names.?; has_names {
			for k, v in names {
				delete(k)
				delete(v)
			}
			names_copy := names
			delete(names_copy)
		}
	}
	attr_values, vals_ok := dynamodb.parse_expression_attribute_values(request.body)
	if !vals_ok {
		make_error_response(response, .ValidationException, "Invalid ExpressionAttributeValues")
		return
	}
	defer {
		for k, v in attr_values {
			delete(k)
			v_copy := v
			dynamodb.attr_value_destroy(&v_copy)
		}
		delete(attr_values)
	}
	// Parse update plan
	plan, plan_ok := dynamodb.parse_update_expression(update_expr, attr_names, attr_values)
	if !plan_ok {
		make_error_response(response, .ValidationException, "Failed to parse UpdateExpression")
		return
	}
	defer dynamodb.update_plan_destroy(&plan)
	// Parse ReturnValues
	return_values := dynamodb.parse_return_values(request.body)
	// Execute update
	old_item, new_item, err := dynamodb.update_item(engine, table_name, key_item, &plan)
	if err != .None {
		handle_storage_error(response, err)
		return
	}
	defer {
		if old, has := old_item.?; has {
			old_copy := old
			dynamodb.item_destroy(&old_copy)
		}
		if new_val, has := new_item.?; has {
			new_copy := new_val
			dynamodb.item_destroy(&new_copy)
		}
	}
	// Build response based on ReturnValues.
	// NOTE: UPDATED_NEW / UPDATED_OLD currently return the full new/old item;
	// real DynamoDB returns only the attributes touched by the update.
	attrs: Maybe(dynamodb.Item)
	switch return_values {
	case "ALL_NEW", "UPDATED_NEW":
		attrs = new_item
	case "ALL_OLD", "UPDATED_OLD":
		attrs = old_item
	case:
		// "NONE" or anything else — no attributes returned.
	}
	if result_item, has := attrs.?; has {
		item_json := dynamodb.serialize_item(result_item)
		resp := fmt.aprintf(`{"Attributes":%s}`, item_json)
		response_set_body(response, transmute([]byte)resp)
	} else {
		response_set_body(response, transmute([]byte)string("{}"))
	}
}
// ============================================================================
// Query and Scan Operations
// ============================================================================
// handle_query ...
handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, response: ^HTTP_Response) {
table_name, ok := dynamodb.parse_table_name(request.body)
if !ok {
@@ -381,7 +499,7 @@ handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, r
return
}
// ---- Fetch table metadata early so we can parse ExclusiveStartKey ----
// Fetch table metadata early for ExclusiveStartKey parsing
metadata, meta_err := dynamodb.get_table_metadata(engine, table_name)
if meta_err != .None {
handle_storage_error(response, meta_err)
@@ -404,7 +522,6 @@ handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, r
return
}
// Clone pk_bytes so it survives kc cleanup (kc borrows from the parsed value)
pk_owned := make([]byte, len(pk_bytes))
copy(pk_owned, pk_bytes)
defer delete(pk_owned)
@@ -415,7 +532,7 @@ handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, r
limit = 100
}
// ---- Parse ExclusiveStartKey with proper type handling ----
// Parse ExclusiveStartKey
exclusive_start_key, esk_ok := dynamodb.parse_exclusive_start_key(
request.body, table_name, metadata.key_schema,
)
@@ -429,7 +546,7 @@ handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, r
}
}
// ---- Pass sort key condition through to storage layer ----
// Pass sort key condition through
sk_condition: Maybe(dynamodb.Sort_Key_Condition) = nil
if skc, has_skc := kc.sk_condition.?; has_skc {
sk_condition = skc
@@ -442,10 +559,62 @@ handle_query :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, r
}
defer dynamodb.query_result_destroy(&result)
// Build response with proper pagination
write_items_response_with_pagination(response, result.items, result.last_evaluated_key, &metadata)
// ---- Parse ExpressionAttributeNames/Values for filter/projection ----
attr_names := dynamodb.parse_expression_attribute_names(request.body)
defer {
if names, has_names := attr_names.?; has_names {
for k, v in names {
delete(k)
delete(v)
}
names_copy := names
delete(names_copy)
}
}
attr_values, _ := dynamodb.parse_expression_attribute_values(request.body)
defer {
for k, v in attr_values {
delete(k)
v_copy := v
dynamodb.attr_value_destroy(&v_copy)
}
delete(attr_values)
}
// ---- Apply FilterExpression (post-query filter) ----
filtered_items := apply_filter_to_items(request.body, result.items, attr_names, attr_values)
scanned_count := len(result.items)
// ---- Apply ProjectionExpression ----
projection, has_proj := dynamodb.parse_projection_expression(request.body, attr_names)
final_items: []dynamodb.Item
if has_proj && len(projection) > 0 {
projected := make([]dynamodb.Item, len(filtered_items))
for item, i in filtered_items {
projected[i] = dynamodb.apply_projection(item, projection)
}
final_items = projected
} else {
final_items = filtered_items
}
// Build response
write_items_response_with_pagination_ex(
response, final_items, result.last_evaluated_key, &metadata, scanned_count,
)
// Cleanup projected items if we created them
if has_proj && len(projection) > 0 {
for &item in final_items {
dynamodb.item_destroy(&item)
}
delete(final_items)
}
}
// handle_scan ...
handle_scan :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, response: ^HTTP_Response) {
table_name, ok := dynamodb.parse_table_name(request.body)
if !ok {
@@ -453,7 +622,6 @@ handle_scan :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, re
return
}
// ---- Fetch table metadata early so we can parse ExclusiveStartKey ----
metadata, meta_err := dynamodb.get_table_metadata(engine, table_name)
if meta_err != .None {
handle_storage_error(response, meta_err)
@@ -461,13 +629,11 @@ handle_scan :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, re
}
defer dynamodb.table_metadata_destroy(&metadata, engine.allocator)
// Parse Limit (default to 100 if not specified)
limit := dynamodb.parse_limit(request.body)
if limit == 0 {
limit = 100
}
// ---- Parse ExclusiveStartKey with proper type handling ----
exclusive_start_key, esk_ok := dynamodb.parse_exclusive_start_key(
request.body, table_name, metadata.key_schema,
)
@@ -481,7 +647,6 @@ handle_scan :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, re
}
}
// Perform scan
result, err := dynamodb.scan(engine, table_name, exclusive_start_key, limit)
if err != .None {
handle_storage_error(response, err)
@@ -489,9 +654,138 @@ handle_scan :: proc(engine: ^dynamodb.Storage_Engine, request: ^HTTP_Request, re
}
defer dynamodb.scan_result_destroy(&result)
// Build response with proper pagination
write_items_response_with_pagination(response, result.items, result.last_evaluated_key, &metadata)
// ---- Parse ExpressionAttributeNames/Values for filter/projection ----
attr_names := dynamodb.parse_expression_attribute_names(request.body)
defer {
if names, has_names := attr_names.?; has_names {
for k, v in names {
delete(k)
delete(v)
}
names_copy := names
delete(names_copy)
}
}
attr_values, _ := dynamodb.parse_expression_attribute_values(request.body)
defer {
for k, v in attr_values {
delete(k)
v_copy := v
dynamodb.attr_value_destroy(&v_copy)
}
delete(attr_values)
}
// ---- Apply FilterExpression ----
filtered_items := apply_filter_to_items(request.body, result.items, attr_names, attr_values)
scanned_count := len(result.items)
// ---- Apply ProjectionExpression ----
projection, has_proj := dynamodb.parse_projection_expression(request.body, attr_names)
final_items: []dynamodb.Item
if has_proj && len(projection) > 0 {
projected := make([]dynamodb.Item, len(filtered_items))
for item, i in filtered_items {
projected[i] = dynamodb.apply_projection(item, projection)
}
final_items = projected
} else {
final_items = filtered_items
}
// Build response
write_items_response_with_pagination_ex(
response, final_items, result.last_evaluated_key, &metadata, scanned_count,
)
if has_proj && len(projection) > 0 {
for &item in final_items {
dynamodb.item_destroy(&item)
}
delete(final_items)
}
}
// ============================================================================
// Shared helper: apply FilterExpression to a set of items
// ============================================================================
// Returns `items` unchanged when the request carries no (parseable) filter;
// otherwise returns a temp-allocated slice of the items that pass the filter.
// Items in the result are borrowed from `items` — do not destroy them twice.
//
// Fix: the filtered dynamic array's backing store was allocated from the
// default allocator and never freed (callers cannot tell a borrowed slice
// from an owned one). It is now taken from context.temp_allocator so it is
// reclaimed with the rest of the per-request temp arena, like the other
// request parsers in this file. (Assumes the temp arena is reset per request
// — confirm against the server loop.)
//
// NOTE(review): an unparseable FilterExpression currently returns the items
// unfiltered; DynamoDB would reject the request with a ValidationException.
apply_filter_to_items :: proc(
	request_body: []byte,
	items: []dynamodb.Item,
	attr_names: Maybe(map[string]string),
	attr_values: map[string]dynamodb.Attribute_Value,
) -> []dynamodb.Item {
	filter_expr, has_filter := dynamodb.parse_filter_expression_string(request_body)
	if !has_filter {
		return items // no filter, return as-is
	}
	filter_node, filter_ok := dynamodb.parse_filter_expression(filter_expr, attr_names, attr_values)
	if !filter_ok || filter_node == nil {
		return items // failed to parse, return unfiltered
	}
	defer {
		dynamodb.filter_node_destroy(filter_node)
		free(filter_node)
	}
	// Filter items (backing array lives in the temp arena)
	filtered := make([dynamic]dynamodb.Item, 0, len(items), context.temp_allocator)
	for item in items {
		if dynamodb.evaluate_filter(item, filter_node) {
			append(&filtered, item)
		}
	}
	return filtered[:]
}
// ============================================================================
// Extended pagination response builder (includes ScannedCount vs Count)
//
// DynamoDB distinguishes:
//   Count        = number of items AFTER FilterExpression
//   ScannedCount = number of items BEFORE FilterExpression
//
// NOTE(review): the builder's buffer is handed to response_set_body via
// to_string and never destroyed here — presumably the response owns/copies
// it; confirm against response_set_body.
// ============================================================================
write_items_response_with_pagination_ex :: proc(
	response: ^HTTP_Response,
	items: []dynamodb.Item,
	last_evaluated_key_binary: Maybe([]byte),
	metadata: ^dynamodb.Table_Metadata,
	scanned_count: int,
) {
	sb := strings.builder_make()
	strings.write_string(&sb, `{"Items":[`)
	first := true
	for item in items {
		if !first {
			strings.write_string(&sb, ",")
		}
		first = false
		strings.write_string(&sb, dynamodb.serialize_item(item))
	}
	// Count/ScannedCount in one formatted write (byte-identical output).
	fmt.sbprintf(&sb, `],"Count":%d,"ScannedCount":%d`, len(items), scanned_count)
	if lek_bytes, has_lek := last_evaluated_key_binary.?; has_lek {
		lek_json, lek_ok := dynamodb.serialize_last_evaluated_key(lek_bytes, metadata)
		if lek_ok {
			strings.write_string(&sb, `,"LastEvaluatedKey":`)
			strings.write_string(&sb, lek_json)
		}
	}
	strings.write_string(&sb, "}")
	response_set_body(response, transmute([]byte)strings.to_string(sb))
}
// ============================================================================
// Shared Pagination Response Builder