Compare commits

...

2 Commits

Author SHA1 Message Date
a6bf357228 fix more tingz 2026-02-16 18:50:02 -05:00
78a4ea7a0c fix tingz 2026-02-16 18:00:59 -05:00
4 changed files with 156 additions and 23 deletions

64
Dockerfile.test_sdk Normal file
View File

@@ -0,0 +1,64 @@
# Multi-stage build for Odin + Python test environment with RocksDB
FROM debian:bookworm-slim AS odin-builder

# Install dependencies for building Odin
RUN apt-get update && apt-get install -y \
    git \
    curl \
    build-essential \
    clang \
    llvm \
    && rm -rf /var/lib/apt/lists/*

# Install Odin compiler (shallow clone: only the tip is needed to build)
WORKDIR /opt
RUN git clone --depth 1 https://github.com/odin-lang/Odin.git odin \
    && cd odin \
    && ./build_odin.sh release

# Final stage with both Odin and Python
FROM python:3.12-slim

# Install runtime and build dependencies including RocksDB.
# NOTE: "librocksdb8.7" does not exist in Debian bookworm (bookworm ships
# librocksdb7.8) and broke the apt install; librocksdb-dev already depends
# on the matching runtime library, so no versioned runtime package is needed.
RUN apt-get update && apt-get install -y \
    clang \
    llvm \
    make \
    git \
    build-essential \
    cmake \
    pkg-config \
    # RocksDB development package (pulls in the matching runtime library)
    librocksdb-dev \
    # Compression libraries that RocksDB depends on
    libsnappy-dev \
    libgflags-dev \
    zlib1g-dev \
    libbz2-dev \
    liblz4-dev \
    libzstd-dev \
    # Additional common dependencies
    libssl-dev \
    libcurl4-openssl-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy Odin compiler from builder stage
COPY --from=odin-builder /opt/odin /opt/odin

# Add Odin to PATH
ENV PATH="/opt/odin:${PATH}"
ENV ODIN_ROOT="/opt/odin"

# Set up library paths for RocksDB.
# NOTE(review): ${LD_LIBRARY_PATH} is unset in the base image, so this leaves
# a trailing ':' (which makes the dynamic linker also search the current
# directory) — confirm whether that is acceptable for this test image.
ENV LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}"
ENV PKG_CONFIG_PATH="/usr/lib/x86_64-linux-gnu/pkgconfig:${PKG_CONFIG_PATH}"

# Install Python dependencies
RUN pip install --no-cache-dir boto3 pytest requests

# Set working directory
WORKDIR /workspace

# Default command
CMD ["/bin/bash"]

View File

@@ -1,4 +1,4 @@
.PHONY: all build release run test clean fmt help install sdk_test .PHONY: all build release run test clean fmt help install
# Project configuration # Project configuration
PROJECT_NAME := jormundb PROJECT_NAME := jormundb
@@ -6,6 +6,66 @@ ODIN := odin
BUILD_DIR := build BUILD_DIR := build
SRC_DIR := . SRC_DIR := .
# Docker configuration for the test SDK image.
# TEST_SDK_IMAGE is a placeholder — override it from the environment, CI, or
# the command line, e.g.:
#   make build-test-sdk TEST_SDK_IMAGE=myorg/odin-python-test-sdk
# `?=` (instead of `:=`) lets an environment variable supply the real name.
TEST_SDK_IMAGE ?= your-dockerhub-username/odin-python-test-sdk
TEST_SDK_TAG ?= latest
# Host port the JormunDB server is expected to listen on (used with --network host)
JORMUN_PORT ?= 8002
# Build the test SDK Docker image.
# NOTE: the Dockerfile added in this change is named "Dockerfile.test_sdk"
# (dot, not underscore); the previous "-f Dockerfile_test_sdk" pointed at a
# file that does not exist and made this target fail immediately.
.PHONY: build-test-sdk
build-test-sdk:
	@echo "Building test SDK Docker image..."
	docker build -f Dockerfile.test_sdk -t $(TEST_SDK_IMAGE):$(TEST_SDK_TAG) .
	@echo "Test SDK image built successfully"
# Push the test SDK image to registry.
# Depends on build-test-sdk, so every push rebuilds the image first.
# Requires a prior `docker login` against $(TEST_SDK_IMAGE)'s registry.
.PHONY: push-test-sdk
push-test-sdk: build-test-sdk
	@echo "Pushing test SDK image to registry..."
	docker push $(TEST_SDK_IMAGE):$(TEST_SDK_TAG)
	@echo "Test SDK image pushed successfully"
# Pull the test SDK image from registry
# (lets CI and teammates skip the slow local Odin build).
.PHONY: pull-test-sdk
pull-test-sdk:
	@echo "Pulling test SDK image from registry..."
	docker pull $(TEST_SDK_IMAGE):$(TEST_SDK_TAG)
# Run SDK tests in the consolidated container.
# Uses host networking so JORMUN_ENDPOINT=localhost reaches a JormunDB server
# already running on the host (NOTE(review): --network host behaves this way
# on Linux only — confirm if macOS/Windows developers need this target).
# $(CURDIR) replaces $(PWD): PWD is inherited from the environment and can be
# stale/wrong under `make -C` or sudo, while CURDIR is make's own cwd; the
# mount path is quoted in case it contains spaces.
.PHONY: test-sdk
test-sdk:
	@echo "Running SDK tests..."
	docker run --rm \
		--network host \
		-v "$(CURDIR)":/workspace \
		-w /workspace \
		-e JORMUN_ENDPOINT=http://localhost:$(JORMUN_PORT) \
		-e AWS_ACCESS_KEY_ID=local \
		-e AWS_SECRET_ACCESS_KEY=local \
		-e AWS_DEFAULT_REGION=us-east-1 \
		$(TEST_SDK_IMAGE):$(TEST_SDK_TAG) \
		sh -c "make build && python tests/sdk/test_sdk.py"
# Run SDK tests with live rebuild (drops into an interactive shell so the
# developer can iterate: edit on the host, re-run `make build && python ...`
# inside the container).
# $(CURDIR) replaces $(PWD): PWD is inherited from the environment and can be
# stale/wrong under `make -C` or sudo, while CURDIR is make's own cwd.
.PHONY: test-sdk-dev
test-sdk-dev:
	@echo "Running SDK tests with live rebuild..."
	docker run --rm -it \
		--network host \
		-v "$(CURDIR)":/workspace \
		-w /workspace \
		-e JORMUN_ENDPOINT=http://localhost:$(JORMUN_PORT) \
		-e AWS_ACCESS_KEY_ID=local \
		-e AWS_SECRET_ACCESS_KEY=local \
		-e AWS_DEFAULT_REGION=us-east-1 \
		$(TEST_SDK_IMAGE):$(TEST_SDK_TAG) \
		/bin/bash
# One-time setup: build and push the test SDK image.
# push-test-sdk already depends on build-test-sdk, so listing both
# prerequisites is redundant but harmless and documents the intent.
.PHONY: setup-test-sdk
setup-test-sdk: build-test-sdk push-test-sdk
	@echo "Test SDK setup complete"
# C++ shim (WAL replication helpers via RocksDB C++ API) # C++ shim (WAL replication helpers via RocksDB C++ API)
SHIM_DIR := rocksdb_shim SHIM_DIR := rocksdb_shim
SHIM_LIB := $(BUILD_DIR)/libjormun_rocksdb_shim.a SHIM_LIB := $(BUILD_DIR)/libjormun_rocksdb_shim.a
@@ -176,12 +236,6 @@ aws-test: run &
--table-name TestTable --table-name TestTable
@echo "\n$(GREEN)✓ AWS CLI test complete$(NC)" @echo "\n$(GREEN)✓ AWS CLI test complete$(NC)"
# Python SDK integration tests (requires JormunDB running on localhost)
sdk_test:
@echo "$(BLUE)Running Python SDK tests against localhost:$(PORT)...$(NC)"
@docker compose -f $(SDK_TEST_COMPOSE) down --remove-orphans 2>/dev/null || true
@JORMUN_PORT=$(PORT) docker compose -f $(SDK_TEST_COMPOSE) run --rm --build sdk-test
@docker compose -f $(SDK_TEST_COMPOSE) down --remove-orphans 2>/dev/null || true
# Development workflow # Development workflow
dev: clean build run dev: clean build run
@@ -208,7 +262,6 @@ help:
@echo "$(GREEN)Test Commands:$(NC)" @echo "$(GREEN)Test Commands:$(NC)"
@echo " make test - Run unit tests" @echo " make test - Run unit tests"
@echo " make aws-test - Test with AWS CLI commands" @echo " make aws-test - Test with AWS CLI commands"
@echo " make sdk_test - Run Python SDK integration tests (requires running server)"
@echo "" @echo ""
@echo "$(GREEN)Utility Commands:$(NC)" @echo "$(GREEN)Utility Commands:$(NC)"
@echo " make fmt - Format source code" @echo " make fmt - Format source code"

View File

@@ -5,6 +5,7 @@ package dynamodb
import "core:encoding/json" import "core:encoding/json"
import "core:strings" import "core:strings"
import "core:mem"
// ============================================================================ // ============================================================================
// ProjectionExpression // ProjectionExpression
@@ -142,6 +143,7 @@ Filter_Node :: struct {
right: ^Filter_Node, right: ^Filter_Node,
// For Not // For Not
child: ^Filter_Node, child: ^Filter_Node,
allocator: mem.Allocator, // allocator that created this node
} }
filter_node_destroy :: proc(node: ^Filter_Node) { filter_node_destroy :: proc(node: ^Filter_Node) {
@@ -171,8 +173,8 @@ filter_node_destroy :: proc(node: ^Filter_Node) {
filter_node_destroy(node.child) filter_node_destroy(node.child)
} }
// Free the node itself (allocated with new(Filter_Node)) // Free the node itself using the allocator that created it
free(node) free(node, node.allocator)
} }
// ============================================================================ // ============================================================================
@@ -224,7 +226,7 @@ parse_or_expr :: proc(
return nil, false return nil, false
} }
parent := new(Filter_Node) parent := make_filter_node()
parent.type = .Or parent.type = .Or
parent.left = left parent.left = left
parent.right = right parent.right = right
@@ -263,7 +265,7 @@ parse_and_expr :: proc(
return nil, false return nil, false
} }
parent := new(Filter_Node) parent := make_filter_node()
parent.type = .And parent.type = .And
parent.left = left parent.left = left
parent.right = right parent.right = right
@@ -294,7 +296,7 @@ parse_not_expr :: proc(
if !child_ok { if !child_ok {
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Not node.type = .Not
node.child = child node.child = child
return node, true return node, true
@@ -390,7 +392,7 @@ parse_primary_expr :: proc(
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Comparison node.type = .Comparison
node.path = path node.path = path
node.comp_op = comp_op node.comp_op = comp_op
@@ -437,7 +439,7 @@ parse_filter_begins_with :: proc(
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Begins_With node.type = .Begins_With
node.path = path node.path = path
node.value = val node.value = val
@@ -483,7 +485,7 @@ parse_filter_contains :: proc(
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Contains node.type = .Contains
node.path = path node.path = path
node.value = val node.value = val
@@ -514,7 +516,7 @@ parse_filter_attr_exists :: proc(
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Attribute_Exists if exists else .Attribute_Not_Exists node.type = .Attribute_Exists if exists else .Attribute_Not_Exists
node.path = path node.path = path
return node, true return node, true
@@ -552,7 +554,7 @@ parse_filter_between :: proc(
return nil, false return nil, false
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .Between node.type = .Between
node.path = path node.path = path
node.value = lo_val node.value = lo_val
@@ -613,7 +615,7 @@ parse_filter_in :: proc(
} }
} }
node := new(Filter_Node) node := make_filter_node()
node.type = .In node.type = .In
node.path = path node.path = path
node.in_values = in_vals[:] node.in_values = in_vals[:]
@@ -819,3 +821,13 @@ parse_filter_expression_string :: proc(request_body: []byte) -> (expr: string, o
ok = true ok = true
return return
} }
// ============================================================================
// Allocator Helper
// ============================================================================

// Allocate a fresh Filter_Node and stamp it with the allocator that created
// it, so filter_node_destroy can later free it via node.allocator.
make_filter_node :: proc() -> (node: ^Filter_Node) {
	node = new(Filter_Node)
	node.allocator = context.allocator
	return
}

View File

@@ -193,10 +193,14 @@ remove_table_lock :: proc(engine: ^Storage_Engine, table_name: string) {
sync.mutex_lock(&engine.table_locks_mutex) sync.mutex_lock(&engine.table_locks_mutex)
defer sync.mutex_unlock(&engine.table_locks_mutex) defer sync.mutex_unlock(&engine.table_locks_mutex)
if lock, found := engine.table_locks[table_name]; found { // Find the actual heap-allocated key string from the map
delete(table_name, engine.allocator) for key, lock in engine.table_locks {
free(lock, engine.allocator) if key == table_name {
delete_key(&engine.table_locks, table_name) delete_key(&engine.table_locks, key)
delete(key, engine.allocator) // free the map's owned key!
free(lock, engine.allocator)
break
}
} }
} }