flesh out the query stuff
dynamodb/expression.odin (new file, 490 lines added)
@@ -0,0 +1,490 @@
// DynamoDB Expression Parser
// Parses KeyConditionExpression with ExpressionAttributeNames and ExpressionAttributeValues
// Supports: pk = :pk, pk = :pk AND sk > :sk, begins_with(sk, :prefix), BETWEEN, etc.
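//
// Example (illustrative values, not from a real request): with
//   ExpressionAttributeNames  {"#s": "sk"}
//   ExpressionAttributeValues {":pk": {"S": "user#1"}, ":lo": {"S": "2024-01"}, ":hi": {"S": "2024-12"}}
// the expression `pk = :pk AND #s BETWEEN :lo AND :hi` parses to a Key_Condition with
// pk_name "pk", pk_value "user#1", and a Sort_Key_Condition{sk_name = "sk",
// operator = .BETWEEN, value = "2024-01", value2 = "2024-12"}.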
package dynamodb

import "core:encoding/json"
import "core:strings"

// ============================================================================
// Sort Key Condition Operators
// ============================================================================

Sort_Key_Operator :: enum {
    EQ,          // =
    LT,          // <
    LE,          // <=
    GT,          // >
    GE,          // >=
    BETWEEN,     // BETWEEN x AND y
    BEGINS_WITH, // begins_with(sk, prefix)
}

// ============================================================================
// Parsed Structures
// ============================================================================

Sort_Key_Condition :: struct {
    sk_name:  string,
    operator: Sort_Key_Operator,
    value:    Attribute_Value,
    value2:   Maybe(Attribute_Value),
}

sort_key_condition_destroy :: proc(skc: ^Sort_Key_Condition) {
    attr_value_destroy(&skc.value)
    if v2, ok := skc.value2.?; ok {
        v2_copy := v2
        attr_value_destroy(&v2_copy)
    }
}

Key_Condition :: struct {
    pk_name:      string,
    pk_value:     Attribute_Value,
    sk_condition: Maybe(Sort_Key_Condition),
}

key_condition_destroy :: proc(kc: ^Key_Condition) {
    attr_value_destroy(&kc.pk_value)
    if skc, ok := kc.sk_condition.?; ok {
        skc_copy := skc
        sort_key_condition_destroy(&skc_copy)
    }
}

// Get the raw partition key value bytes for building storage keys
key_condition_get_pk_bytes :: proc(kc: ^Key_Condition) -> ([]byte, bool) {
    #partial switch v in kc.pk_value {
    case String:
        return transmute([]byte)string(v), true
    case Number:
        return transmute([]byte)string(v), true
    case Binary:
        return transmute([]byte)string(v), true
    }
    return nil, false
}

// ============================================================================
// Tokenizer
// ============================================================================

Tokenizer :: struct {
    input: string,
    pos:   int,
}

tokenizer_init :: proc(input: string) -> Tokenizer {
    return Tokenizer{input = input, pos = 0}
}

tokenizer_next :: proc(t: ^Tokenizer) -> Maybe(string) {
    // Skip whitespace
    for t.pos < len(t.input) && is_whitespace(t.input[t.pos]) {
        t.pos += 1
    }

    if t.pos >= len(t.input) {
        return nil
    }

    start := t.pos
    c := t.input[t.pos]

    // Single-character tokens
    if c == '(' || c == ')' || c == ',' {
        t.pos += 1
        return t.input[start:t.pos]
    }

    // Two-character operators
    if t.pos + 1 < len(t.input) {
        two := t.input[t.pos:t.pos + 2]
        if two == "<=" || two == ">=" || two == "<>" {
            t.pos += 2
            return two
        }
    }

    // Single-character operators
    if c == '=' || c == '<' || c == '>' {
        t.pos += 1
        return t.input[start:t.pos]
    }

    // Identifier or keyword (includes :placeholder and #name)
    for t.pos < len(t.input) && is_ident_char(t.input[t.pos]) {
        t.pos += 1
    }

    if t.pos > start {
        return t.input[start:t.pos]
    }

    // Unknown character, skip it
    t.pos += 1
    return tokenizer_next(t)
}
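
// For example, the input `begins_with(#s, :prefix)` tokenizes to:
//   "begins_with", "(", "#s", ",", ":prefix", ")"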

@(private = "file")
is_whitespace :: proc(c: byte) -> bool {
    return c == ' ' || c == '\t' || c == '\n' || c == '\r'
}

@(private = "file")
is_ident_char :: proc(c: byte) -> bool {
    return (c >= 'a' && c <= 'z') ||
        (c >= 'A' && c <= 'Z') ||
        (c >= '0' && c <= '9') ||
        c == '_' || c == ':' || c == '#' || c == '-' || c == '.'
}

// ---------------------------------------------------------------------------
// Helper: convert Maybe(string) tokens into (string, bool) so or_return works.
// ---------------------------------------------------------------------------

@(private = "file")
next_token :: proc(t: ^Tokenizer) -> (tok: string, ok: bool) {
    if v, has := tokenizer_next(t).?; has {
        tok = v
        ok = true
        return
    }
    return
}

// ============================================================================
// Expression Parsing
// ============================================================================

parse_key_condition_expression :: proc(
    expression: string,
    attribute_names: Maybe(map[string]string),
    attribute_values: map[string]Attribute_Value,
) -> (kc: Key_Condition, ok: bool) {
    t := tokenizer_init(expression)

    pk_name_token := next_token(&t) or_return
    pk_name := resolve_attribute_name(pk_name_token, attribute_names) or_return

    eq_token := next_token(&t) or_return
    if eq_token != "=" {
        return
    }

    pk_value_token := next_token(&t) or_return
    pk_value, pk_ok := resolve_attribute_value(pk_value_token, attribute_values)
    if !pk_ok {
        return
    }

    sk_condition: Maybe(Sort_Key_Condition) = nil

    // Optional "AND ..."
    if and_token, has_and := tokenizer_next(&t).?; has_and {
        if !strings.equal_fold(and_token, "AND") {
            attr_value_destroy(&pk_value)
            return
        }

        skc, skc_ok := parse_sort_key_condition(&t, attribute_names, attribute_values)
        if !skc_ok {
            attr_value_destroy(&pk_value)
            return
        }
        sk_condition = skc
    }

    kc = Key_Condition{
        pk_name      = pk_name,
        pk_value     = pk_value,
        sk_condition = sk_condition,
    }
    ok = true
    return
}
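
// Informal grammar accepted above (a summary of the handled cases, not a spec):
//   key-condition  := <pk> "=" :value [ "AND" sort-condition ]
//   sort-condition := <sk> ("=" | "<" | "<=" | ">" | ">=") :value
//                   | <sk> "BETWEEN" :low "AND" :high
//                   | "begins_with" "(" <sk> "," :value ")"
// where <pk>/<sk> may be #name placeholders resolved via ExpressionAttributeNames,
// and :value placeholders are resolved via ExpressionAttributeValues.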

@(private = "file")
parse_sort_key_condition :: proc(
    t: ^Tokenizer,
    attribute_names: Maybe(map[string]string),
    attribute_values: map[string]Attribute_Value,
) -> (skc: Sort_Key_Condition, ok: bool) {
    first_token := next_token(t) or_return

    if strings.equal_fold(first_token, "begins_with") {
        skc, ok = parse_begins_with(t, attribute_names, attribute_values)
        return
    }

    sk_name := resolve_attribute_name(first_token, attribute_names) or_return

    op_token := next_token(t) or_return
    operator, op_ok := parse_operator(op_token)
    if !op_ok {
        return
    }

    value_token := next_token(t) or_return
    value, val_ok := resolve_attribute_value(value_token, attribute_values)
    if !val_ok {
        return
    }

    value2: Maybe(Attribute_Value) = nil
    if operator == .BETWEEN {
        // IMPORTANT: after allocating `value`, do NOT use `or_return` without cleanup.
        and_token, tok_ok := next_token(t)
        if !tok_ok || !strings.equal_fold(and_token, "AND") {
            attr_value_destroy(&value)
            return
        }

        value2_token, tok2_ok := next_token(t)
        if !tok2_ok {
            attr_value_destroy(&value)
            return
        }

        v2, v2_ok := resolve_attribute_value(value2_token, attribute_values)
        if !v2_ok {
            attr_value_destroy(&value)
            return
        }
        value2 = v2
    }

    skc = Sort_Key_Condition{
        sk_name  = sk_name,
        operator = operator,
        value    = value,
        value2   = value2,
    }
    ok = true
    return
}

@(private = "file")
parse_begins_with :: proc(
    t: ^Tokenizer,
    attribute_names: Maybe(map[string]string),
    attribute_values: map[string]Attribute_Value,
) -> (skc: Sort_Key_Condition, ok: bool) {
    lparen := next_token(t) or_return
    if lparen != "(" {
        return
    }

    sk_name_token := next_token(t) or_return
    sk_name := resolve_attribute_name(sk_name_token, attribute_names) or_return

    comma := next_token(t) or_return
    if comma != "," {
        return
    }

    value_token := next_token(t) or_return
    value, val_ok := resolve_attribute_value(value_token, attribute_values)
    if !val_ok {
        return
    }

    // after allocating `value`, avoid `or_return` so we can clean up
    rparen, tok_ok := next_token(t)
    if !tok_ok || rparen != ")" {
        attr_value_destroy(&value)
        return
    }

    skc = Sort_Key_Condition{
        sk_name  = sk_name,
        operator = .BEGINS_WITH,
        value    = value,
        value2   = nil,
    }
    ok = true
    return
}

@(private = "file")
parse_operator :: proc(token: string) -> (Sort_Key_Operator, bool) {
    if token == "=" do return .EQ, true
    if token == "<" do return .LT, true
    if token == "<=" do return .LE, true
    if token == ">" do return .GT, true
    if token == ">=" do return .GE, true
    if strings.equal_fold(token, "BETWEEN") do return .BETWEEN, true
    return .EQ, false
}

@(private = "file")
resolve_attribute_name :: proc(token: string, names: Maybe(map[string]string)) -> (string, bool) {
    if len(token) > 0 && token[0] == '#' {
        if n, has_names := names.?; has_names {
            if resolved, found := n[token]; found {
                return resolved, true
            }
        }
        return "", false
    }
    return token, true
}

@(private = "file")
resolve_attribute_value :: proc(
    token: string,
    values: map[string]Attribute_Value,
) -> (Attribute_Value, bool) {
    if len(token) > 0 && token[0] == ':' {
        if original, found := values[token]; found {
            return attr_value_deep_copy(original), true
        }
        return nil, false
    }
    return nil, false
}

// ============================================================================
// Request Parsing Helpers
// ============================================================================

parse_expression_attribute_names :: proc(request_body: []byte) -> Maybe(map[string]string) {
    data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
    if parse_err != nil {
        return nil
    }
    defer json.destroy_value(data)

    root, ok := data.(json.Object)
    if !ok {
        return nil
    }

    names_val, found := root["ExpressionAttributeNames"]
    if !found {
        return nil
    }

    names_obj, names_ok := names_val.(json.Object)
    if !names_ok {
        return nil
    }

    result := make(map[string]string)

    for key, val in names_obj {
        str, str_ok := val.(json.String)
        if !str_ok {
            continue
        }
        result[strings.clone(key)] = strings.clone(string(str))
    }

    return result
}

parse_expression_attribute_values :: proc(request_body: []byte) -> (map[string]Attribute_Value, bool) {
    data, parse_err := json.parse(request_body, allocator = context.temp_allocator)
    if parse_err != nil {
        return make(map[string]Attribute_Value), true
    }
    defer json.destroy_value(data)

    root, ok := data.(json.Object)
    if !ok {
        return make(map[string]Attribute_Value), true
    }

    values_val, found := root["ExpressionAttributeValues"]
    if !found {
        return make(map[string]Attribute_Value), true
    }

    values_obj, values_ok := values_val.(json.Object)
    if !values_ok {
        return make(map[string]Attribute_Value), true
    }

    result := make(map[string]Attribute_Value)

    for key, val in values_obj {
        attr, attr_ok := parse_attribute_value(val)
        if !attr_ok {
            continue
        }
        result[strings.clone(key)] = attr
    }

    return result, true
}
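
// For reference, the request fragment these helpers read looks like this
// (illustrative values):
//   "ExpressionAttributeNames":  {"#s": "sk"},
//   "ExpressionAttributeValues": {":pk": {"S": "user#1"}, ":lo": {"N": "10"}}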

// NOTE: changed from Maybe(string) -> (string, bool) so callers can use or_return.
parse_key_condition_expression_string :: proc(request_body: []byte) -> (expr: string, ok: bool) {
    body_str := string(request_body)

    marker :: "\"KeyConditionExpression\""
    start_idx := strings.index(body_str, marker)
    if start_idx < 0 {
        return
    }

    after_marker := body_str[start_idx + len(marker):]
    colon_idx := strings.index(after_marker, ":")
    if colon_idx < 0 {
        return
    }

    rest := after_marker[colon_idx + 1:]
    quote_start := strings.index(rest, "\"")
    if quote_start < 0 {
        return
    }

    value_start := quote_start + 1
    pos := value_start
    for pos < len(rest) {
        if rest[pos] == '"' && (pos == 0 || rest[pos - 1] != '\\') {
            expr = rest[value_start:pos]
            ok = true
            return
        }
        pos += 1
    }

    return
}

// Convenience: parse a complete Query key condition from request body
parse_query_key_condition :: proc(request_body: []byte) -> (kc: Key_Condition, ok: bool) {
    expression := parse_key_condition_expression_string(request_body) or_return

    attr_names := parse_expression_attribute_names(request_body)
    defer {
        if names, has_names := attr_names.?; has_names {
            for k, v in names {
                delete(k)
                delete(v)
            }
            names_copy := names
            delete(names_copy)
        }
    }

    attr_values, vals_ok := parse_expression_attribute_values(request_body)
    if !vals_ok {
        return
    }
    defer {
        for k, v in attr_values {
            delete(k)
            v_copy := v
            attr_value_destroy(&v_copy)
        }
        delete(attr_values)
    }

    kc, ok = parse_key_condition_expression(expression, attr_names, attr_values)
    return
}
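
// Usage sketch (illustrative only, not a handler from this commit):
//
//     body: []byte = ... // raw Query request JSON carrying KeyConditionExpression
//                        // plus ExpressionAttributeNames/Values as shown above
//     kc, kc_ok := parse_query_key_condition(body)
//     if kc_ok {
//         defer key_condition_destroy(&kc)
//         pk_bytes, _ := key_condition_get_pk_bytes(&kc)
//         // pk_bytes can feed the storage-level query(); the sort key condition
//         // in kc.sk_condition is left for the caller to apply.
//     }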

@@ -2,9 +2,11 @@
package dynamodb

import "core:c"
import "core:encoding/json"
import "core:fmt"
import "core:mem"
import "core:slice"
import "core:strconv"
import "core:strings"
import "core:sync"
import "core:time"
@@ -95,6 +97,16 @@ table_metadata_get_partition_key_name :: proc(metadata: ^Table_Metadata) -> Mayb
    return nil
}

// Get the attribute type for a given attribute name
table_metadata_get_attribute_type :: proc(metadata: ^Table_Metadata, attr_name: string) -> Maybe(Scalar_Attribute_Type) {
    for ad in metadata.attribute_definitions {
        if ad.attribute_name == attr_name {
            return ad.attribute_type
        }
    }
    return nil
}

// Get the sort key attribute name (if any)
table_metadata_get_sort_key_name :: proc(metadata: ^Table_Metadata) -> Maybe(string) {
    for ks in metadata.key_schema {
@@ -229,12 +241,196 @@ deserialize_table_metadata :: proc(data: []byte, allocator: mem.Allocator) -> (T

    metadata: Table_Metadata

    // TODO: Parse KeySchema and AttributeDefinitions from JSON strings
    // For now, return empty - this will be implemented when needed
    // Parse table status
    if status_val, found := meta_item["TableStatus"]; found {
        #partial switch v in status_val {
        case String:
            metadata.table_status = table_status_from_string(string(v))
        }
    } else {
        metadata.table_status = .ACTIVE
    }

    // Parse creation date time
    if time_val, found := meta_item["CreationDateTime"]; found {
        #partial switch v in time_val {
        case Number:
            val, parse_ok := strconv.parse_i64(string(v))
            metadata.creation_date_time = val if parse_ok else 0
        }
    }

    // Parse KeySchema from embedded JSON string
    if ks_val, found := meta_item["KeySchema"]; found {
        #partial switch v in ks_val {
        case String:
            ks, ks_ok := parse_key_schema_json(string(v), allocator)
            if ks_ok {
                metadata.key_schema = ks
            }
        }
    }

    // Parse AttributeDefinitions from embedded JSON string
    if ad_val, found := meta_item["AttributeDefinitions"]; found {
        #partial switch v in ad_val {
        case String:
            ad, ad_ok := parse_attr_defs_json(string(v), allocator)
            if ad_ok {
                metadata.attribute_definitions = ad
            }
        }
    }

    return metadata, true
}

// Parse key schema from JSON string like [{"AttributeName":"id","KeyType":"HASH"}]
parse_key_schema_json :: proc(json_str: string, allocator: mem.Allocator) -> ([]Key_Schema_Element, bool) {
    data, parse_err := json.parse(transmute([]byte)json_str, allocator = context.temp_allocator)
    if parse_err != nil {
        return nil, false
    }
    defer json.destroy_value(data)

    arr, ok := data.(json.Array)
    if !ok {
        return nil, false
    }

    result := make([]Key_Schema_Element, len(arr), allocator)

    for elem, i in arr {
        obj, obj_ok := elem.(json.Object)
        if !obj_ok {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_name_val, name_found := obj["AttributeName"]
        if !name_found {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_name, name_ok := attr_name_val.(json.String)
        if !name_ok {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        key_type_val, type_found := obj["KeyType"]
        if !type_found {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        key_type_str, type_ok := key_type_val.(json.String)
        if !type_ok {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        kt, kt_ok := key_type_from_string(string(key_type_str))
        if !kt_ok {
            cleanup_key_schema(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        result[i] = Key_Schema_Element{
            attribute_name = strings.clone(string(attr_name), allocator),
            key_type       = kt,
        }
    }

    return result, true
}

cleanup_key_schema :: proc(elems: []Key_Schema_Element, allocator: mem.Allocator) {
    for ks in elems {
        delete(ks.attribute_name, allocator)
    }
}

// Parse attribute definitions from JSON string
parse_attr_defs_json :: proc(json_str: string, allocator: mem.Allocator) -> ([]Attribute_Definition, bool) {
    data, parse_err := json.parse(transmute([]byte)json_str, allocator = context.temp_allocator)
    if parse_err != nil {
        return nil, false
    }
    defer json.destroy_value(data)

    arr, ok := data.(json.Array)
    if !ok {
        return nil, false
    }

    result := make([]Attribute_Definition, len(arr), allocator)

    for elem, i in arr {
        obj, obj_ok := elem.(json.Object)
        if !obj_ok {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_name_val, name_found := obj["AttributeName"]
        if !name_found {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_name, name_ok := attr_name_val.(json.String)
        if !name_ok {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_type_val, type_found := obj["AttributeType"]
        if !type_found {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        attr_type_str, type_ok := attr_type_val.(json.String)
        if !type_ok {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        at, at_ok := scalar_type_from_string(string(attr_type_str))
        if !at_ok {
            cleanup_attr_defs(result[:i], allocator)
            delete(result, allocator)
            return nil, false
        }

        result[i] = Attribute_Definition{
            attribute_name = strings.clone(string(attr_name), allocator),
            attribute_type = at,
        }
    }

    return result, true
}

cleanup_attr_defs :: proc(elems: []Attribute_Definition, allocator: mem.Allocator) {
    for ad in elems {
        delete(ad.attribute_name, allocator)
    }
}

// Get table metadata
get_table_metadata :: proc(engine: ^Storage_Engine, table_name: string) -> (Table_Metadata, Storage_Error) {
    meta_key := build_meta_key(table_name)
@@ -616,6 +812,93 @@ scan :: proc(
    }, .None
}

// Query items by partition key with optional pagination
query :: proc(
    engine: ^Storage_Engine,
    table_name: string,
    partition_key_value: []byte,
    exclusive_start_key: Maybe([]byte),
    limit: int,
) -> (Query_Result, Storage_Error) {
    table_lock := get_or_create_table_lock(engine, table_name)
    sync.rw_mutex_shared_lock(table_lock)
    defer sync.rw_mutex_shared_unlock(table_lock)

    // Verify table exists
    metadata, meta_err := get_table_metadata(engine, table_name)
    if meta_err != .None {
        return {}, meta_err
    }
    defer table_metadata_destroy(&metadata, engine.allocator)

    // Build partition prefix
    prefix := build_partition_prefix(table_name, partition_key_value)
    defer delete(prefix)

    iter, iter_err := rocksdb.iter_create(&engine.db)
    if iter_err != .None {
        return {}, .RocksDB_Error
    }
    defer rocksdb.iter_destroy(&iter)

    max_items := limit if limit > 0 else 1_000_000

    // Seek to start position; exclusive_start_key is the last key returned by
    // the previous page, so skip it after seeking to it.
    if start_key, has_start := exclusive_start_key.?; has_start {
        if has_prefix(start_key, prefix) {
            rocksdb.iter_seek(&iter, start_key)
            if rocksdb.iter_valid(&iter) {
                rocksdb.iter_next(&iter)
            }
        } else {
            rocksdb.iter_seek(&iter, prefix)
        }
    } else {
        rocksdb.iter_seek(&iter, prefix)
    }

    items := make([dynamic]Item)
    count := 0
    last_key: Maybe([]byte) = nil

    for rocksdb.iter_valid(&iter) {
        key := rocksdb.iter_key(&iter)
        if key == nil || !has_prefix(key, prefix) {
            break
        }

        value := rocksdb.iter_value(&iter)
        if value == nil {
            rocksdb.iter_next(&iter)
            continue
        }

        item, decode_ok := decode(value)
        if !decode_ok {
            rocksdb.iter_next(&iter)
            continue
        }

        append(&items, item)
        count += 1

        // Hit the limit: record the key of the item just returned as the
        // pagination token. The next page seeks to it and skips it, so no
        // item is dropped or duplicated across page boundaries.
        if count >= max_items {
            last_key = slice.clone(key)
            break
        }

        rocksdb.iter_next(&iter)
    }

    result_items := make([]Item, len(items))
    copy(result_items, items[:])

    return Query_Result{
        items              = result_items,
        last_evaluated_key = last_key,
    }, .None
}
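
// Pagination example (illustrative): with limit = 2 over keys k1..k5 in one
// partition, the first call returns [k1, k2] with last_evaluated_key = k2;
// passing that key back as exclusive_start_key returns [k3, k4], and so on
// until a page comes back with last_evaluated_key = nil.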

// Helper to check if a byte slice has a prefix
has_prefix :: proc(data: []byte, prefix: []byte) -> bool {
    if len(data) < len(prefix) {
@@ -629,8 +912,39 @@ has_prefix :: proc(data: []byte, prefix: []byte) -> bool {
    return true
}

// List tables (simplified - returns empty list for now)
list_tables :: proc(engine: ^Storage_Engine) -> []string {
    // TODO: Implement by iterating over meta keys
    return {}
}
// List tables by iterating over meta keys in RocksDB
list_tables :: proc(engine: ^Storage_Engine) -> ([]string, Storage_Error) {
    iter, iter_err := rocksdb.iter_create(&engine.db)
    if iter_err != .None {
        return nil, .RocksDB_Error
    }
    defer rocksdb.iter_destroy(&iter)

    meta_prefix := []byte{u8(Entity_Type.Meta)}
    rocksdb.iter_seek(&iter, meta_prefix)

    tables := make([dynamic]string)

    for rocksdb.iter_valid(&iter) {
        key := rocksdb.iter_key(&iter)
        if key == nil || len(key) == 0 || key[0] != u8(Entity_Type.Meta) {
            break
        }

        decoder := Key_Decoder{data = key, pos = 0}
        _, et_ok := decoder_read_entity_type(&decoder)
        if !et_ok {
            break
        }

        tbl_name_bytes, seg_ok := decoder_read_segment_borrowed(&decoder)
        if !seg_ok {
            break
        }

        append(&tables, strings.clone(string(tbl_name_bytes)))
        rocksdb.iter_next(&iter)
    }

    return tables[:], .None
}
@@ -253,6 +253,18 @@ table_status_to_string :: proc(status: Table_Status) -> string {
    return "ACTIVE"
}

table_status_from_string :: proc(s: string) -> Table_Status {
    switch s {
    case "CREATING": return .CREATING
    case "UPDATING": return .UPDATING
    case "DELETING": return .DELETING
    case "ACTIVE": return .ACTIVE
    case "ARCHIVING": return .ARCHIVING
    case "ARCHIVED": return .ARCHIVED
    }
    return .ACTIVE
}

// Table description
Table_Description :: struct {
    table_name: string,
@@ -352,6 +364,17 @@ error_to_response :: proc(err_type: DynamoDB_Error_Type, message: string) -> str
    return fmt.aprintf(`{{"__type":"%s","message":"%s"}}`, type_str, message)
}

// Build an Attribute_Value with the correct scalar type from raw bytes
build_attribute_value_with_type :: proc(raw_bytes: []byte, attr_type: Scalar_Attribute_Type) -> Attribute_Value {
    owned := strings.clone(string(raw_bytes))
    switch attr_type {
    case .S: return String(owned)
    case .N: return Number(owned)
    case .B: return Binary(owned)
    }
    return String(owned)
}

// Deep copy an attribute value
attr_value_deep_copy :: proc(attr: Attribute_Value) -> Attribute_Value {
    switch v in attr {