major changes and feature additions #1

Open
myk wants to merge 11 commits from claude_code_work into main
103 changed files with 11298 additions and 279 deletions

CLAUDE.md (new file, 89 lines)
View File

@@ -0,0 +1,89 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. Work on this project should follow the priorities defined in [todo.md](todo.md) and the specifications in [spec.md](spec.md).
## Project Overview
Rhizome-node is a distributed, peer-to-peer database engine that implements a rhizomatic (decentralized, non-hierarchical) data model. It synchronizes data across multiple nodes without a central authority using immutable "deltas" as the fundamental unit of change. There is a specification for the behavior of this system in [spec.md](spec.md).
## Development Commands
```bash
# Build the TypeScript project
npm run build
# Build in watch mode
npm run build:watch
# Run tests
npm test
# Run a specific test file
npm test -- __tests__/delta.ts
# Run linter
npm run lint
# Generate coverage report
npm run coverage
# Run the example application
npm run example-app
```
## Architecture Overview
### Core Concepts
1. **Deltas**: Immutable change records that describe modifications to entities. Each delta contains:
- Unique ID and timestamps
- Creator and host information
- Pointers defining entity/property relationships
- DeltaV2 is the current format (DeltaV1 is legacy)
2. **Views**: Different ways to interpret the delta stream:
- **Lossless View**: Stores all deltas without conflict resolution
- **Lossy Views**: Apply conflict resolution (e.g., Last-Write-Wins)
   - Custom resolvers can be implemented (see the delta/resolver sketch after this list)
3. **Collections**: Group related entities (similar to database tables)
- Support typed collections via `TypedCollection<T>`
- Implement CRUD operations through delta generation
4. **Networking**: Dual transport layer:
- ZeroMQ for efficient binary communication
- libp2p for decentralized peer discovery
- Pub/sub for delta propagation
- Request/reply for synchronization
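
A minimal sketch of the delta flow, mirroring the patterns used in this PR's test suites; treat it as illustrative rather than canonical:

```typescript
import { RhizomeNode, Lossless, Delta, LastWriteWins } from "../src";

// A node plus a lossless view over its delta stream
const node = new RhizomeNode();
const lossless = new Lossless(node);

// Describe a change as an immutable delta: set entity1.score = 10 in "collection"
lossless.ingestDelta(new Delta({
  creator: "author1",
  host: "host1",
  pointers: [
    { localContext: "collection", target: "entity1", targetContext: "score" },
    { localContext: "score", target: 10 }
  ]
}));

// Apply a lossy view (Last-Write-Wins conflict resolution) on top of the lossless view
const resolver = new LastWriteWins(lossless);
const resolved = resolver.resolve();
// resolved!["entity1"].properties.score === 10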
### Key Files and Entry Points
- `src/node.ts`: Main `RhizomeNode` class orchestrating all components
- `src/delta.ts`: Delta data structures and conversion logic
- `src/lossless.ts`: Core lossless view implementation
- `src/collection-basic.ts`: Basic collection implementation
- `src/http/api.ts`: REST API endpoints
- `src/pub-sub.ts`: Network communication layer
### Testing Patterns
- Unit tests in `__tests__/` directory
- Multi-node integration tests in `__tests__/run/`
- Use Jest with experimental VM modules
- Test files follow pattern: `{feature}.ts`
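
A minimal test skeleton following these patterns (the file name and assertion are illustrative):

```typescript
// __tests__/example-feature.ts
import { RhizomeNode, Lossless } from "../src";

describe("Example feature", () => {
  let node: RhizomeNode;
  let lossless: Lossless;

  beforeEach(() => {
    // Fresh node and view per test, as in the other suites
    node = new RhizomeNode();
    lossless = new Lossless(node);
  });

  test("does something with deltas", () => {
    expect(lossless).toBeDefined();
  });
});
```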
### HTTP API Structure
The HTTP API provides RESTful endpoints (a usage sketch follows this list):
- `GET/PUT /collection/:name/:id` - Entity operations
- `GET /peers` - Peer information
- `GET /deltas/stats` - Delta statistics
- `GET /lossless/:entityId` - Raw delta access
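
A hedged usage sketch for these endpoints; the base URL, port, and request/response body shapes below are assumptions, not confirmed by this PR:

```typescript
// Assumption: the HTTP API listens on localhost:3000; adjust to the configured port.
async function exampleHttpUsage(base = "http://localhost:3000") {
  // Write an entity into a collection, then read it back
  await fetch(`${base}/collection/users/alice`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ name: "Alice" }),
  });
  const entity = await (await fetch(`${base}/collection/users/alice`)).json();

  // Peer information, delta statistics, and raw delta access
  const peers = await (await fetch(`${base}/peers`)).json();
  const stats = await (await fetch(`${base}/deltas/stats`)).json();
  const rawDeltas = await (await fetch(`${base}/lossless/alice`)).json();

  return { entity, peers, stats, rawDeltas };
}
```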
### Important Implementation Notes
- All data modifications go through deltas - never modify state directly
- Deltas are immutable once created
- Use `Context.getOrCreate()` for singleton access
- Network ports: publish (default 4000) and request (default 4001)
- Debug logging uses namespaces like `rhizome:*`
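
The test files in this PR use the `debug` package with namespaced loggers; a minimal sketch of the same convention for application code (enable output with, e.g., `DEBUG=rhizome:*`):

```typescript
import Debug from "debug";

// Namespaced logger; only prints when DEBUG matches (e.g. DEBUG=rhizome:*)
const debug = Debug("rhizome:example");
debug("node started with publish port %d and request port %d", 4000, 4001);
```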

View File

@@ -1,3 +1,5 @@
See [spec.md](spec.md) for additional specification details about this project.
# Concepts
| | Implemented | Notes |

View File

@@ -0,0 +1,542 @@
import {
RhizomeNode,
Lossless,
Delta,
AggregationResolver,
MinResolver,
MaxResolver,
SumResolver,
AverageResolver,
CountResolver,
AggregationType
} from "../src";
describe('Aggregation Resolvers', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Basic Aggregation', () => {
test('should aggregate numbers using min resolver', () => {
// Add first entity with score 10
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
// Add second entity with score 5
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "score"
}, {
localContext: "score",
target: 5
}]
}));
// Add third entity with score 15
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity3",
targetContext: "score"
}, {
localContext: "score",
target: 15
}]
}));
const minResolver = new MinResolver(lossless, ['score']);
const result = minResolver.resolve();
expect(result).toBeDefined();
expect(Object.keys(result!)).toHaveLength(3);
expect(result!['entity1'].properties.score).toBe(10);
expect(result!['entity2'].properties.score).toBe(5);
expect(result!['entity3'].properties.score).toBe(15);
});
test('should aggregate numbers using max resolver', () => {
// Add deltas for entities
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "score"
}, {
localContext: "score",
target: 5
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity3",
targetContext: "score"
}, {
localContext: "score",
target: 15
}]
}));
const maxResolver = new MaxResolver(lossless, ['score']);
const result = maxResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(10);
expect(result!['entity2'].properties.score).toBe(5);
expect(result!['entity3'].properties.score).toBe(15);
});
test('should aggregate numbers using sum resolver', () => {
// Add first value for entity1
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 10
}]
}));
// Add second value for entity1 (should sum)
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 20
}]
}));
// Add value for entity2
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "value"
}, {
localContext: "value",
target: 5
}]
}));
const sumResolver = new SumResolver(lossless, ['value']);
const result = sumResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(30); // 10 + 20
expect(result!['entity2'].properties.value).toBe(5);
});
test('should aggregate numbers using average resolver', () => {
// Add multiple values for entity1
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
// Single value for entity2
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "score"
}, {
localContext: "score",
target: 30
}]
}));
const avgResolver = new AverageResolver(lossless, ['score']);
const result = avgResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(15); // (10 + 20) / 2
expect(result!['entity2'].properties.score).toBe(30);
});
test('should count values using count resolver', () => {
// Add multiple visit deltas for entity1
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "visits"
}, {
localContext: "visits",
target: 1
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "visits"
}, {
localContext: "visits",
target: 1
}]
}));
// Single visit for entity2
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "visits"
}, {
localContext: "visits",
target: 1
}]
}));
const countResolver = new CountResolver(lossless, ['visits']);
const result = countResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.visits).toBe(2); // count of 2 deltas
expect(result!['entity2'].properties.visits).toBe(1); // count of 1 delta
});
});
describe('Custom Aggregation Configuration', () => {
test('should handle mixed aggregation types', () => {
// Add first set of values
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "min_val"
}, {
localContext: "min_val",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "max_val"
}, {
localContext: "max_val",
target: 5
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "sum_val"
}, {
localContext: "sum_val",
target: 3
}]
}));
// Add second set of values
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "min_val"
}, {
localContext: "min_val",
target: 5
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "max_val"
}, {
localContext: "max_val",
target: 15
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "sum_val"
}, {
localContext: "sum_val",
target: 7
}]
}));
const resolver = new AggregationResolver(lossless, {
min_val: 'min' as AggregationType,
max_val: 'max' as AggregationType,
sum_val: 'sum' as AggregationType
});
const result = resolver.resolve();
expect(result).toBeDefined();
const entity = result!['entity1'];
expect(entity.properties.min_val).toBe(5); // min of 10, 5
expect(entity.properties.max_val).toBe(15); // max of 5, 15
expect(entity.properties.sum_val).toBe(10); // sum of 3, 7
});
test('should ignore non-numeric values', () => {
// Add numeric value
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
// Add non-numeric value (string)
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'test'
}]
}));
// Add another numeric value
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const sumResolver = new SumResolver(lossless, ['score', 'name']);
const result = sumResolver.resolve();
expect(result).toBeDefined();
const entity = result!['entity1'];
expect(entity.properties.score).toBe(30); // sum of 10, 20
expect(entity.properties.name).toBe(0); // ignored non-numeric, defaults to 0
});
test('should handle empty value arrays', () => {
// Create entity with non-aggregated property
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'test'
}]
}));
const sumResolver = new SumResolver(lossless, ['score']);
const result = sumResolver.resolve();
expect(result).toBeDefined();
// Should not have entity1 since no 'score' property was found
expect(result!['entity1']).toBeUndefined();
});
});
describe('Edge Cases', () => {
test('should handle single value aggregations', () => {
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 42
}]
}));
const avgResolver = new AverageResolver(lossless, ['value']);
const result = avgResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(42);
});
test('should handle zero values', () => {
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 0
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 10
}]
}));
const sumResolver = new SumResolver(lossless, ['value']);
const result = sumResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(10); // 0 + 10
});
test('should handle negative values', () => {
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: -5
}]
}));
lossless.ingestDelta(new Delta({
creator: 'test',
host: 'host1',
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 10
}]
}));
const minResolver = new MinResolver(lossless, ['value']);
const result = minResolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(-5);
});
});
});

View File

@@ -0,0 +1,247 @@
import * as _RhizomeImports from "../src";
Review

This line appears to do nothing

/**
* Tests for lossless view compose() and decompose() bidirectional conversion
* Ensures that deltas can be composed into lossless views and decomposed back
* to the original deltas with all pointer relationships preserved.
*/
import { RhizomeNode } from '../src/node';
import { Delta } from '../src/core';
describe('Lossless View Compose/Decompose', () => {
let node: RhizomeNode;
beforeEach(() => {
node = new RhizomeNode();
});
describe('Bidirectional Conversion', () => {
it('should compose and decompose simple entity deltas correctly', () => {
// Create simple entity deltas
const nameDeltas = [
new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice Smith' }
]
}),
new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'email' },
{ localContext: 'email', target: 'alice@example.com' }
]
})
];
// Ingest the deltas
nameDeltas.forEach(delta => node.lossless.ingestDelta(delta));
// Compose lossless view
const composed = node.lossless.compose(['alice']);
const aliceView = composed['alice'];
expect(aliceView).toBeDefined();
expect(aliceView.id).toBe('alice');
expect(aliceView.propertyDeltas.name).toHaveLength(1);
expect(aliceView.propertyDeltas.email).toHaveLength(1);
// Decompose back to deltas
const decomposed = node.lossless.decompose(aliceView);
expect(decomposed).toHaveLength(2);
// Check that original deltas are preserved
const originalIds = nameDeltas.map(d => d.id).sort();
const decomposedIds = decomposed.map(d => d.id).sort();
expect(decomposedIds).toEqual(originalIds);
// Verify pointer structure is preserved
const nameDataDelta = decomposed.find(d =>
d.pointers.some(p => p.localContext === 'name' && p.target === 'Alice Smith')
);
expect(nameDataDelta).toBeDefined();
expect(nameDataDelta?.pointers).toHaveLength(2);
const upPointer = nameDataDelta?.pointers.find(p => p.targetContext === 'name');
expect(upPointer).toBeDefined();
expect(upPointer?.target).toBe('alice');
expect(upPointer?.localContext).toBe('users');
});
it('should handle multi-pointer relationship deltas correctly', () => {
// Create a complex relationship delta
const relationshipDelta = new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'relationships' },
{ localContext: 'partner', target: 'bob' },
{ localContext: 'type', target: 'friendship' },
{ localContext: 'since', target: '2020-01-15' },
{ localContext: 'intensity', target: 8 }
]
});
node.lossless.ingestDelta(relationshipDelta);
// Compose and decompose
const composed = node.lossless.compose(['alice']);
const aliceView = composed['alice'];
const decomposed = node.lossless.decompose(aliceView);
expect(decomposed).toHaveLength(1);
const reconstituted = decomposed[0];
// Should have all 5 pointers
expect(reconstituted.pointers).toHaveLength(5);
// Check that all pointer types are preserved
const contexts = reconstituted.pointers.map(p => p.localContext).sort();
expect(contexts).toEqual(['users', 'partner', 'type', 'since', 'intensity'].sort());
// Check that the "up" pointer to alice is correctly reconstructed
const upPointer = reconstituted.pointers.find(p => p.targetContext === 'relationships');
expect(upPointer).toBeDefined();
expect(upPointer?.target).toBe('alice');
expect(upPointer?.localContext).toBe('users');
// Check scalar values are preserved
const intensityPointer = reconstituted.pointers.find(p => p.localContext === 'intensity');
expect(intensityPointer?.target).toBe(8);
});
it('should handle reference relationships correctly', () => {
// Create entities first
const aliceDelta = new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice' }
]
});
const bobDelta = new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'bob', targetContext: 'name' },
{ localContext: 'name', target: 'Bob' }
]
});
// Create friendship relationship
const friendshipDelta = new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friend', target: 'bob', targetContext: 'friends' }
]
});
[aliceDelta, bobDelta, friendshipDelta].forEach(d => node.lossless.ingestDelta(d));
// Compose Alice's view
const composed = node.lossless.compose(['alice']);
const aliceView = composed['alice'];
expect(aliceView.propertyDeltas.friends).toHaveLength(1);
// Decompose and verify the friendship delta is correctly reconstructed
const decomposed = node.lossless.decompose(aliceView);
const friendshipReconstituted = decomposed.find(d =>
d.pointers.some(p => p.localContext === 'friend')
);
expect(friendshipReconstituted).toBeDefined();
expect(friendshipReconstituted?.pointers).toHaveLength(2);
// Check both reference pointers are preserved
const alicePointer = friendshipReconstituted?.pointers.find(p => p.target === 'alice');
const bobPointer = friendshipReconstituted?.pointers.find(p => p.target === 'bob');
expect(alicePointer).toBeDefined();
expect(alicePointer?.targetContext).toBe('friends');
expect(bobPointer).toBeDefined();
expect(bobPointer?.targetContext).toBe('friends');
});
it('should preserve delta metadata correctly', () => {
const originalDelta = new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice' }
]
});
node.lossless.ingestDelta(originalDelta);
const composed = node.lossless.compose(['alice']);
const decomposed = node.lossless.decompose(composed['alice']);
expect(decomposed).toHaveLength(1);
const reconstituted = decomposed[0];
// Check metadata preservation
expect(reconstituted.id).toBe(originalDelta.id);
expect(reconstituted.creator).toBe(originalDelta.creator);
expect(reconstituted.host).toBe(originalDelta.host);
expect(reconstituted.timeCreated).toBe(originalDelta.timeCreated);
});
it('should handle multiple deltas for the same property', () => {
// Create multiple name changes for alice
const nameDeltas = [
new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice' }
]
}),
new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice Smith' }
]
}),
new Delta({
creator: 'test-creator',
host: 'test-host',
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'name' },
{ localContext: 'name', target: 'Alice Johnson' }
]
})
];
nameDeltas.forEach(d => node.lossless.ingestDelta(d));
const composed = node.lossless.compose(['alice']);
const aliceView = composed['alice'];
// Should have 3 deltas for the name property
expect(aliceView.propertyDeltas.name).toHaveLength(3);
const decomposed = node.lossless.decompose(aliceView);
// Should decompose back to 3 separate deltas
expect(decomposed).toHaveLength(3);
// All original delta IDs should be preserved
const originalIds = nameDeltas.map(d => d.id).sort();
const decomposedIds = decomposed.map(d => d.id).sort();
expect(decomposedIds).toEqual(originalIds);
});
});
});

View File

@@ -0,0 +1,559 @@
import {
RhizomeNode,
Lossless,
Delta,
LastWriteWins,
TimestampResolver,
SumResolver,
CustomResolver,
LastWriteWinsPlugin,
MajorityVotePlugin
} from "../src";
describe('Concurrent Write Scenarios', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Simultaneous Writes with Same Timestamp', () => {
test('should handle simultaneous writes using last-write-wins resolver', () => {
const timestamp = 1000;
// Simulate two writers updating the same property at the exact same time
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
id: 'delta-a',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer2',
host: 'host2',
id: 'delta-b',
timeCreated: timestamp, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 200
}]
}));
const resolver = new LastWriteWins(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// Should resolve deterministically (likely based on delta processing order)
Review

The comment is outdated; this seems to use a more robust tie-breaking algorithm rather than relying on processing order.

expect(typeof result!['entity1'].properties.score).toBe('number');
expect([100, 200]).toContain(result!['entity1'].properties.score);
});
test('should handle simultaneous writes using timestamp resolver with tie-breaking', () => {
const timestamp = 1000;
lossless.ingestDelta(new Delta({
creator: 'writer_z', // Lexicographically later
host: 'host1',
id: 'delta-a',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer_a', // Lexicographically earlier
host: 'host2',
id: 'delta-b',
timeCreated: timestamp, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 200
}]
}));
const resolver = new TimestampResolver(lossless, 'creator-id');
const result = resolver.resolve();
expect(result).toBeDefined();
// writer_z should win due to lexicographic ordering
expect(result!['entity1'].properties.score).toBe(100);
});
test('should handle multiple writers with aggregation resolver', () => {
const timestamp = 1000;
// Multiple writers add values simultaneously
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "points"
}, {
localContext: "points",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer2',
host: 'host2',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "points"
}, {
localContext: "points",
target: 20
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer3',
host: 'host3',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "points"
}, {
localContext: "points",
target: 30
}]
}));
const resolver = new SumResolver(lossless, ['points']);
const result = resolver.resolve();
expect(result).toBeDefined();
// All values should be summed regardless of timing
expect(result!['entity1'].properties.points).toBe(60); // 10 + 20 + 30
});
});
describe('Out-of-Order Write Arrival', () => {
test('should handle writes arriving out of chronological order', () => {
// Newer delta arrives first
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: 2000, // Later timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 'newer'
}]
}));
// Older delta arrives later
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: 1000, // Earlier timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 'older'
}]
}));
const resolver = new LastWriteWins(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// Should still resolve to the chronologically newer value
expect(result!['entity1'].properties.value).toBe('newer');
});
test('should maintain correct aggregation despite out-of-order arrival', () => {
// Add deltas in reverse chronological order
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 30
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new SumResolver(lossless, ['score']);
const result = resolver.resolve();
expect(result).toBeDefined();
// Sum should be correct regardless of arrival order
expect(result!['entity1'].properties.score).toBe(60); // 10 + 20 + 30
});
});
describe('High-Frequency Concurrent Updates', () => {
test('should handle rapid concurrent updates to the same entity', () => {
const baseTimestamp = 1000;
const numWriters = 10;
const writesPerWriter = 5;
// Simulate multiple writers making rapid updates
for (let writer = 0; writer < numWriters; writer++) {
for (let write = 0; write < writesPerWriter; write++) {
lossless.ingestDelta(new Delta({
creator: `writer${writer}`,
host: `host${writer}`,
timeCreated: baseTimestamp + write, // Small time increments
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "counter"
}, {
localContext: "counter",
target: 1 // Each update adds 1
}]
}));
}
}
const resolver = new SumResolver(lossless, ['counter']);
const result = resolver.resolve();
expect(result).toBeDefined();
// Should count all updates
expect(result!['entity1'].properties.counter).toBe(numWriters * writesPerWriter);
});
test('should handle concurrent updates to multiple properties', () => {
const timestamp = 1000;
// Writer 1 updates name and score
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'alice'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: timestamp + 1,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
// Writer 2 updates name and score concurrently
lossless.ingestDelta(new Delta({
creator: 'writer2',
host: 'host2',
timeCreated: timestamp + 2,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'bob'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer2',
host: 'host2',
timeCreated: timestamp + 3,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 200
}]
}));
const resolver = new CustomResolver(lossless, {
name: new LastWriteWinsPlugin(),
score: new LastWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.name).toBe('bob'); // Later timestamp
expect(result!['entity1'].properties.score).toBe(200); // Later timestamp
});
});
describe('Cross-Entity Concurrent Writes', () => {
test('should handle concurrent writes to different entities', () => {
const timestamp = 1000;
// Multiple writers updating different entities simultaneously
for (let i = 0; i < 5; i++) {
lossless.ingestDelta(new Delta({
creator: `writer${i}`,
host: `host${i}`,
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: `entity${i}`,
targetContext: "value"
}, {
localContext: "value",
target: (i + 1) * 10 // Start from 10 to avoid 0 values
}]
}));
}
const resolver = new LastWriteWins(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
expect(Object.keys(result!)).toHaveLength(5);
for (let i = 0; i < 5; i++) {
expect(result![`entity${i}`].properties.value).toBe((i + 1) * 10);
}
});
test('should handle mixed entity and property conflicts', () => {
const timestamp = 1000;
// Entity1: Multiple writers competing for same property
lossless.ingestDelta(new Delta({
creator: 'writer1',
host: 'host1',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "votes"
}, {
localContext: "votes",
target: 'option_a'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer2',
host: 'host2',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "votes"
}, {
localContext: "votes",
target: 'option_a'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'writer3',
host: 'host3',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "votes"
}, {
localContext: "votes",
target: 'option_b'
}]
}));
// Entity2: Single writer, no conflict
lossless.ingestDelta(new Delta({
creator: 'writer4',
host: 'host4',
timeCreated: timestamp,
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "status"
}, {
localContext: "status",
target: 'active'
}]
}));
const resolver = new CustomResolver(lossless, {
votes: new MajorityVotePlugin(),
status: new LastWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.votes).toBe('option_a'); // 2 votes vs 1
expect(result!['entity2'].properties.status).toBe('active');
});
});
describe('Stress Testing', () => {
test('should handle large number of concurrent writes efficiently', () => {
const numEntities = 100;
const numWritersPerEntity = 10;
const baseTimestamp = 1000;
// Generate a large number of concurrent writes
for (let entity = 0; entity < numEntities; entity++) {
for (let writer = 0; writer < numWritersPerEntity; writer++) {
lossless.ingestDelta(new Delta({
creator: `writer${writer}`,
host: `host${writer}`,
timeCreated: baseTimestamp + Math.floor(Math.random() * 1000), // Random timestamps
pointers: [{
localContext: "collection",
target: `entity${entity}`,
targetContext: "score"
}, {
localContext: "score",
target: Math.floor(Math.random() * 100) // Random scores
}]
}));
}
}
const resolver = new SumResolver(lossless, ['score']);
const result = resolver.resolve();
expect(result).toBeDefined();
expect(Object.keys(result!)).toHaveLength(numEntities);
// Each entity should have a score (sum of all writer contributions)
for (let entity = 0; entity < numEntities; entity++) {
expect(result![`entity${entity}`]).toBeDefined();
expect(typeof result![`entity${entity}`].properties.score).toBe('number');
expect(result![`entity${entity}`].properties.score).toBeGreaterThan(0);
}
});
test('should maintain consistency under rapid updates and resolution calls', () => {
const entityId = 'stress-test-entity';
let updateCount = 0;
// Add initial deltas
for (let i = 0; i < 50; i++) {
lossless.ingestDelta(new Delta({
creator: `writer${i % 5}`,
host: `host${i % 3}`,
timeCreated: 1000 + i,
pointers: [{
localContext: "collection",
target: entityId,
targetContext: "counter"
}, {
localContext: "counter",
target: 1
}]
}));
updateCount++;
}
// Verify initial state
let resolver = new SumResolver(lossless, ['counter']);
let result = resolver.resolve();
expect(result).toBeDefined();
expect(result![entityId].properties.counter).toBe(updateCount);
// Add more deltas and verify consistency
for (let i = 0; i < 25; i++) {
lossless.ingestDelta(new Delta({
creator: 'late-writer',
host: 'late-host',
timeCreated: 2000 + i,
pointers: [{
localContext: "collection",
target: entityId,
targetContext: "counter"
}, {
localContext: "counter",
target: 2
}]
}));
updateCount += 2;
// Create a fresh resolver to avoid accumulator caching issues
resolver = new SumResolver(lossless, ['counter']);
result = resolver.resolve();
expect(result![entityId].properties.counter).toBe(updateCount);
}
});
});
});

View File

@@ -0,0 +1,676 @@
import {
RhizomeNode,
Lossless,
Delta,
CustomResolver,
ResolverPlugin,
LastWriteWinsPlugin,
FirstWriteWinsPlugin,
ConcatenationPlugin,
MajorityVotePlugin,
MinPlugin,
MaxPlugin,
PropertyTypes,
CollapsedDelta
} from "../src";
describe('Custom Resolvers', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Built-in Plugins', () => {
test('LastWriteWinsPlugin should resolve to most recent value', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'first'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'second'
}]
}));
const resolver = new CustomResolver(lossless, {
name: new LastWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.name).toBe('second');
});
test('FirstWriteWinsPlugin should resolve to earliest value', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'second'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'first'
}]
}));
const resolver = new CustomResolver(lossless, {
name: new FirstWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.name).toBe('first');
});
test('ConcatenationPlugin should join string values chronologically', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "tags"
}, {
localContext: "tags",
target: 'red'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "tags"
}, {
localContext: "tags",
target: 'blue'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "tags"
}, {
localContext: "tags",
target: 'green'
}]
}));
const resolver = new CustomResolver(lossless, {
tags: new ConcatenationPlugin(' ')
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.tags).toBe('red green blue');
});
test('ConcatenationPlugin should handle duplicates', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "tags"
}, {
localContext: "tags",
target: 'red'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "tags"
}, {
localContext: "tags",
target: 'red' // duplicate
}]
}));
const resolver = new CustomResolver(lossless, {
tags: new ConcatenationPlugin(',')
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.tags).toBe('red'); // Should not duplicate
});
test('MajorityVotePlugin should resolve to most voted value', () => {
// Add 3 votes for 'red'
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "color"
}, {
localContext: "color",
target: 'red'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user2',
host: 'host1',
timeCreated: 1001,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "color"
}, {
localContext: "color",
target: 'red'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user3',
host: 'host1',
timeCreated: 1002,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "color"
}, {
localContext: "color",
target: 'red'
}]
}));
// Add 2 votes for 'blue'
lossless.ingestDelta(new Delta({
creator: 'user4',
host: 'host1',
timeCreated: 1003,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "color"
}, {
localContext: "color",
target: 'blue'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user5',
host: 'host1',
timeCreated: 1004,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "color"
}, {
localContext: "color",
target: 'blue'
}]
}));
const resolver = new CustomResolver(lossless, {
color: new MajorityVotePlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.color).toBe('red'); // 3 votes vs 2 votes
});
test('MinPlugin should resolve to minimum numeric value', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 50
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 75
}]
}));
const resolver = new CustomResolver(lossless, {
score: new MinPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(50);
});
test('MaxPlugin should resolve to maximum numeric value', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 150
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 75
}]
}));
const resolver = new CustomResolver(lossless, {
score: new MaxPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(150);
});
});
describe('Mixed Plugin Configurations', () => {
test('should handle different plugins for different properties', () => {
// Add name with different timestamps
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'old_name'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'new_name'
}]
}));
// Add scores
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 50
}]
}));
const resolver = new CustomResolver(lossless, {
name: new LastWriteWinsPlugin(), // Should resolve to 'new_name'
score: new MinPlugin() // Should resolve to 50
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.name).toBe('new_name');
expect(result!['entity1'].properties.score).toBe(50);
});
test('should only include entities with configured properties', () => {
// Entity1 has configured property
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'test'
}]
}));
// Entity2 has non-configured property
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "other"
}, {
localContext: "other",
target: 'value'
}]
}));
const resolver = new CustomResolver(lossless, {
name: new LastWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1']).toBeDefined();
expect(result!['entity1'].properties.name).toBe('test');
expect(result!['entity2']).toBeUndefined(); // No configured properties
});
});
describe('Custom Plugin Implementation', () => {
test('should work with custom plugin', () => {
// Custom plugin that counts the number of updates
class CountPlugin implements ResolverPlugin<{count: number}> {
name = 'count';
initialize() {
return {count: 0};
}
update(currentState: {count: number}, _newValue: PropertyTypes, _delta: CollapsedDelta) {
return {count: currentState.count + 1};
}
resolve(state: {count: number}): PropertyTypes {
return state.count;
}
}
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "updates"
}, {
localContext: "updates",
target: 'first'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "updates"
}, {
localContext: "updates",
target: 'second'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "updates"
}, {
localContext: "updates",
target: 'third'
}]
}));
const resolver = new CustomResolver(lossless, {
updates: new CountPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.updates).toBe(3);
});
test('should work with stateful custom plugin', () => {
// Custom plugin that calculates running average
class RunningAveragePlugin implements ResolverPlugin<{sum: number, count: number}> {
name = 'running-average';
initialize() {
return {sum: 0, count: 0};
}
update(currentState: {sum: number, count: number}, newValue: PropertyTypes, _delta: CollapsedDelta) {
if (typeof newValue === 'number') {
return {
sum: currentState.sum + newValue,
count: currentState.count + 1
};
}
return currentState;
}
resolve(state: {sum: number, count: number}): PropertyTypes {
return state.count > 0 ? state.sum / state.count : 0;
}
}
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 3000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 30
}]
}));
const resolver = new CustomResolver(lossless, {
score: new RunningAveragePlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(20); // (10 + 20 + 30) / 3
});
});
describe('Edge Cases', () => {
test('should handle empty delta sets', () => {
const resolver = new CustomResolver(lossless, {
name: new LastWriteWinsPlugin()
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(Object.keys(result!)).toHaveLength(0);
});
test('should handle non-matching property types gracefully', () => {
// Add string value to numeric plugin
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 'not_a_number'
}]
}));
const resolver = new CustomResolver(lossless, {
score: new MinPlugin() // Expects numeric values
});
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(0); // Default value
});
});
});

View File

@@ -0,0 +1,358 @@
import {
DeltaV1,
DeltaV2,
InvalidDeltaFormatError,
MissingRequiredFieldError,
InvalidPointerError,
validateDeltaNetworkImageV1,
validateDeltaNetworkImageV2
} from "../src";
describe("Delta Validation", () => {
describe("Invalid Delta Formats", () => {
describe("DeltaV1 validation", () => {
it("should throw error for non-object delta", () => {
expect(() => validateDeltaNetworkImageV1(null)).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV1("string")).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV1(123)).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV1([])).toThrow(InvalidDeltaFormatError);
});
it("should throw error for invalid ID types", () => {
const invalidDeltas = [
{ id: null, timeCreated: 123, host: "host", creator: "creator", pointers: [] },
{ id: 123, timeCreated: 123, host: "host", creator: "creator", pointers: [] },
{ id: "", timeCreated: 123, host: "host", creator: "creator", pointers: [] },
{ id: {}, timeCreated: 123, host: "host", creator: "creator", pointers: [] }
];
invalidDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidDeltaFormatError);
});
});
it("should throw error for invalid timestamp", () => {
const invalidDeltas = [
{ id: "id", timeCreated: "123", host: "host", creator: "creator", pointers: [] },
{ id: "id", timeCreated: -123, host: "host", creator: "creator", pointers: [] },
{ id: "id", timeCreated: 0, host: "host", creator: "creator", pointers: [] },
{ id: "id", timeCreated: null, host: "host", creator: "creator", pointers: [] }
];
invalidDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidDeltaFormatError);
});
});
it("should throw error for invalid host/creator", () => {
const invalidDeltas = [
{ id: "id", timeCreated: 123, host: null, creator: "creator", pointers: [] },
{ id: "id", timeCreated: 123, host: "", creator: "creator", pointers: [] },
{ id: "id", timeCreated: 123, host: 123, creator: "creator", pointers: [] },
{ id: "id", timeCreated: 123, host: "host", creator: null, pointers: [] },
{ id: "id", timeCreated: 123, host: "host", creator: "", pointers: [] },
{ id: "id", timeCreated: 123, host: "host", creator: 123, pointers: [] }
];
invalidDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidDeltaFormatError);
});
});
it("should throw error for non-array pointers", () => {
const invalidDeltas = [
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: null },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: {} },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: "pointers" },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: 123 }
];
invalidDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidDeltaFormatError);
});
});
it("should throw error for empty pointers array", () => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: [] };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidDeltaFormatError);
});
it("should throw error for invalid pointer structure", () => {
const invalidPointers = [
[null],
["string"],
[123],
[{ localContext: null, target: "target" }],
[{ localContext: "", target: "target" }],
[{ localContext: 123, target: "target" }],
[{ localContext: "context", target: undefined }],
[{ localContext: "context", target: {} }],
[{ localContext: "context", target: [] }]
];
invalidPointers.forEach(pointers => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidPointerError);
});
});
it("should throw error for invalid targetContext", () => {
const invalidPointers = [
[{ localContext: "context", target: "target", targetContext: null }],
[{ localContext: "context", target: "target", targetContext: "" }],
[{ localContext: "context", target: "target", targetContext: 123 }],
[{ localContext: "context", target: "target", targetContext: {} }]
];
invalidPointers.forEach(pointers => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidPointerError);
});
});
it("should throw error for pointer consistency violation", () => {
// If targetContext exists, target must be a string (reference)
const pointers = [{ localContext: "context", target: 123, targetContext: "property" }];
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(InvalidPointerError);
});
});
describe("DeltaV2 validation", () => {
it("should throw error for non-object delta", () => {
expect(() => validateDeltaNetworkImageV2(null)).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV2("string")).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV2(123)).toThrow(InvalidDeltaFormatError);
expect(() => validateDeltaNetworkImageV2([])).toThrow(InvalidDeltaFormatError);
});
it("should throw error for invalid pointers object", () => {
const invalidDeltas = [
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: null },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: [] },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: "pointers" },
{ id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: 123 }
];
invalidDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(InvalidDeltaFormatError);
});
});
it("should throw error for empty pointers object", () => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers: {} };
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(InvalidDeltaFormatError);
});
it("should throw error for invalid pointer keys", () => {
const invalidPointers = [
{ "": "value" }
];
invalidPointers.forEach(pointers => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(InvalidPointerError);
});
});
it("should throw error for invalid pointer values", () => {
const invalidPointers = [
{ key: undefined },
{ key: [] }
];
invalidPointers.forEach(pointers => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(InvalidPointerError);
});
});
it("should throw error for invalid reference format", () => {
const invalidReferences = [
{ key: {} }, // Empty reference
{ key: { ref1: "val1", ref2: "val2" } }, // Multiple keys
{ key: { "": "value" } }, // Empty key
{ key: { ref: "" } }, // Empty value
{ key: { ref: 123 } }, // Non-string value
{ key: { ref: null } } // Null value
];
invalidReferences.forEach(pointers => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator", pointers };
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(InvalidPointerError);
});
});
});
});
describe("Missing Required Fields", () => {
describe("DeltaV1", () => {
it("should throw MissingRequiredFieldError for missing id", () => {
const delta = { timeCreated: 123, host: "host", creator: "creator", pointers: [] };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(/id/);
});
it("should throw MissingRequiredFieldError for missing timeCreated", () => {
const delta = { id: "id", host: "host", creator: "creator", pointers: [] };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(/timeCreated/);
});
it("should throw MissingRequiredFieldError for missing host", () => {
const delta = { id: "id", timeCreated: 123, creator: "creator", pointers: [] };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(/host/);
});
it("should throw MissingRequiredFieldError for missing creator", () => {
const delta = { id: "id", timeCreated: 123, host: "host", pointers: [] };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(/creator/);
});
it("should throw MissingRequiredFieldError for missing pointers", () => {
const delta = { id: "id", timeCreated: 123, host: "host", creator: "creator" };
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV1(delta)).toThrow(/pointers/);
});
});
describe("DeltaV2", () => {
it("should throw MissingRequiredFieldError for all missing fields", () => {
const requiredFields = ["id", "timeCreated", "host", "creator", "pointers"];
requiredFields.forEach(field => {
const delta: Record<string, unknown> = {
id: "id",
timeCreated: 123,
host: "host",
creator: "creator",
pointers: { key: "value" }
};
delete delta[field];
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(MissingRequiredFieldError);
expect(() => validateDeltaNetworkImageV2(delta)).toThrow(new RegExp(field));
});
});
});
});
describe("Valid Delta Formats", () => {
it("should accept valid DeltaV1", () => {
const validDeltas = [
{
id: "uuid-123",
timeCreated: 123456789,
host: "host1",
creator: "creator1",
pointers: [{ localContext: "name", target: "Alice" }]
},
{
id: "uuid-456",
timeCreated: 987654321,
host: "host2",
creator: "creator2",
pointers: [
{ localContext: "name", target: "Bob" },
{ localContext: "age", target: 25 },
{ localContext: "active", target: null }
]
},
{
id: "uuid-789",
timeCreated: 111111111,
host: "host3",
creator: "creator3",
pointers: [{ localContext: "friend", target: "user123", targetContext: "friendOf" }]
}
];
validDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV1(delta)).not.toThrow();
});
});
it("should accept valid DeltaV2", () => {
const validDeltas = [
{
id: "uuid-123",
timeCreated: 123456789,
host: "host1",
creator: "creator1",
pointers: { name: "Alice" }
},
{
id: "uuid-456",
timeCreated: 987654321,
host: "host2",
creator: "creator2",
pointers: {
name: "Bob",
age: 25,
active: null
}
},
{
id: "uuid-789",
timeCreated: 111111111,
host: "host3",
creator: "creator3",
pointers: { friend: { user123: "friendOf" } }
}
];
validDeltas.forEach(delta => {
expect(() => validateDeltaNetworkImageV2(delta)).not.toThrow();
});
});
});
describe("Delta class integration", () => {
it("should validate when creating DeltaV1 from network image", () => {
const invalidDelta = {
id: "id",
timeCreated: "not-a-number",
host: "host",
creator: "creator",
pointers: [{ localContext: "name", target: "value" }]
};
expect(() => DeltaV1.fromNetworkImage(invalidDelta as never)).toThrow(InvalidDeltaFormatError);
});
it("should validate when creating DeltaV2 from network image", () => {
const invalidDelta = {
id: "id",
timeCreated: 123,
host: "",
creator: "creator",
pointers: { name: "value" }
};
expect(() => DeltaV2.fromNetworkImage(invalidDelta as never)).toThrow(InvalidDeltaFormatError);
});
it("should accept valid network images", () => {
const validV1 = {
id: "uuid-123",
timeCreated: 123456789,
host: "host1",
creator: "creator1",
pointers: [{ localContext: "name", target: "Alice" }]
};
const validV2 = {
id: "uuid-456",
timeCreated: 987654321,
host: "host2",
creator: "creator2",
pointers: { name: "Bob" }
};
expect(() => DeltaV1.fromNetworkImage(validV1)).not.toThrow();
expect(() => DeltaV2.fromNetworkImage(validV2)).not.toThrow();
});
});
});

View File

@ -1,4 +1,4 @@
import {DeltaV1, DeltaV2} from "../src/delta";
import {DeltaV1, DeltaV2} from "../src";
describe("Delta", () => {
it("can convert DeltaV1 to DeltaV2", () => {

View File

@ -1,8 +1,5 @@
import Debug from "debug";
import {Delta} from "../src/delta";
import {LastWriteWins} from "../src/last-write-wins";
import {Lossless} from "../src/lossless";
import {RhizomeNode} from "../src/node";
import {Delta, LastWriteWins, Lossless, RhizomeNode} from "../src";
const debug = Debug('test:last-write-wins');
describe('Last write wins', () => {

View File

@ -1,5 +1,5 @@
import {Delta, DeltaFilter, DeltaV2} from '../src/delta';
import {Lossless} from '../src/lossless';
import {Delta, DeltaFilter, DeltaV2} from '../src/core';
import {Lossless} from '../src/views';
import {RhizomeNode} from '../src/node';
describe('Lossless', () => {
@ -177,6 +177,69 @@ describe('Lossless', () => {
});
});
// TODO: Test with transactions, say A1 -- B -- A2
it('filter with transactions', () => {
const losslessT = new Lossless(node);
const transactionId = 'tx-filter-test';
// Declare transaction with 3 deltas
losslessT.ingestDelta(new Delta({
creator: 'system',
host: 'H',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 3 }
]
}));
// A1: First delta from creator A
losslessT.ingestDelta(new Delta({
creator: 'A',
host: 'H',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'step', target: 'process1', targetContext: 'status' },
{ localContext: 'value', target: 'started' }
]
}));
// B: Delta from creator B
losslessT.ingestDelta(new Delta({
creator: 'B',
host: 'H',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'step', target: 'process1', targetContext: 'status' },
{ localContext: 'value', target: 'processing' }
]
}));
// Transaction incomplete - nothing should show
const incompleteView = losslessT.view(['process1']);
expect(incompleteView.process1).toBeUndefined();
// A2: Second delta from creator A completes transaction
losslessT.ingestDelta(new Delta({
creator: 'A',
host: 'H',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'step', target: 'process1', targetContext: 'status' },
{ localContext: 'value', target: 'completed' }
]
}));
// All deltas visible now
const completeView = losslessT.view(['process1']);
expect(completeView.process1).toBeDefined();
expect(completeView.process1.propertyDeltas.status).toHaveLength(3);
// Filter by creator A only
const filterA: DeltaFilter = ({creator}) => creator === 'A';
const filteredView = losslessT.view(['process1'], filterA);
expect(filteredView.process1).toBeDefined();
expect(filteredView.process1.propertyDeltas.status).toHaveLength(2);
expect(filteredView.process1.propertyDeltas.status.every(d => d.creator === 'A')).toBe(true);
});
});
});

View File

@ -1,9 +1,14 @@
import Debug from 'debug';
import {Delta, PointerTarget} from "../src/delta";
import {lastValueFromDeltas, valueFromCollapsedDelta} from "../src/last-write-wins";
import {Lossless, LosslessViewOne} from "../src/lossless";
import {Lossy} from "../src/lossy";
import {RhizomeNode} from "../src/node";
import {
Delta,
PointerTarget,
lastValueFromDeltas,
valueFromCollapsedDelta,
Lossless,
LosslessViewOne,
Lossy,
RhizomeNode
} from "../src";
const debug = Debug('test:lossy');
type Role = {

View File

@ -0,0 +1,307 @@
/**
* Tests for nested object resolution with deltas containing 3+ pointers
* This tests the complex case where a single delta establishes relationships
* between multiple entities and includes scalar values.
*/
import { RhizomeNode } from '../src/node';
import { Delta } from '../src/core';
import { DefaultSchemaRegistry } from '../src/schema';
import { SchemaBuilder, PrimitiveSchemas, ReferenceSchemas, SchemaAppliedViewWithNesting } from '../src/schema';
import { TypedCollectionImpl } from '../src/collections';
describe('Multi-Pointer Delta Resolution', () => {
let node: RhizomeNode;
let schemaRegistry: DefaultSchemaRegistry;
beforeEach(() => {
node = new RhizomeNode();
schemaRegistry = new DefaultSchemaRegistry();
});
describe('Three-Entity Relationship Deltas', () => {
it('should handle movie casting deltas with actor, movie, role, and scalars', async () => {
// Create schemas for a movie casting scenario
const actorSchema = SchemaBuilder
.create('actor')
.name('Actor')
.property('name', PrimitiveSchemas.requiredString())
.property('filmography', ReferenceSchemas.to('casting-summary', 3))
.required('name')
.build();
const movieSchema = SchemaBuilder
.create('movie')
.name('Movie')
.property('title', PrimitiveSchemas.requiredString())
.property('cast', ReferenceSchemas.to('casting-summary', 3))
.required('title')
.build();
const roleSchema = SchemaBuilder
.create('role')
.name('Role')
.property('name', PrimitiveSchemas.requiredString())
.property('portrayals', ReferenceSchemas.to('casting-summary', 3))
.required('name')
.build();
const castingSummarySchema = SchemaBuilder
.create('casting-summary')
.name('Casting Summary')
.property('name', PrimitiveSchemas.string())
.property('title', PrimitiveSchemas.string())
.property('salary', PrimitiveSchemas.number())
.property('contract_date', PrimitiveSchemas.string())
.additionalProperties(false)
.build();
schemaRegistry.register(actorSchema);
schemaRegistry.register(movieSchema);
schemaRegistry.register(roleSchema);
schemaRegistry.register(castingSummarySchema);
// Create collections
const actorCollection = new TypedCollectionImpl<{ name: string }>('actors', actorSchema, schemaRegistry);
const movieCollection = new TypedCollectionImpl<{ title: string }>('movies', movieSchema, schemaRegistry);
const roleCollection = new TypedCollectionImpl<{ name: string }>('roles', roleSchema, schemaRegistry);
actorCollection.rhizomeConnect(node);
movieCollection.rhizomeConnect(node);
roleCollection.rhizomeConnect(node);
// Create entities
await actorCollection.put('keanu', { name: 'Keanu Reeves' });
await movieCollection.put('matrix', { title: 'The Matrix' });
await roleCollection.put('neo', { name: 'Neo' });
// Create a complex casting delta with multiple entity references and scalar values
const castingDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'actors', target: 'keanu', targetContext: 'filmography' },
{ localContext: 'movies', target: 'matrix', targetContext: 'cast' },
{ localContext: 'roles', target: 'neo', targetContext: 'portrayals' },
{ localContext: 'salary', target: 15000000 },
{ localContext: 'contract_date', target: '1999-03-31' }
]
});
node.lossless.ingestDelta(castingDelta);
// Test from Keanu's perspective
const keanuViews = node.lossless.view(['keanu']);
const keanuView = keanuViews['keanu'];
expect(keanuView.propertyDeltas.filmography).toBeDefined();
expect(keanuView.propertyDeltas.filmography.length).toBe(1);
const nestedKeanuView = schemaRegistry.applySchemaWithNesting(
keanuView,
'actor',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedKeanuView.id).toBe('keanu');
// Should resolve references to matrix and neo, but not keanu (self)
expect(nestedKeanuView.nestedObjects.filmography).toBeDefined();
if (nestedKeanuView.nestedObjects.filmography) {
const castingEntry = nestedKeanuView.nestedObjects.filmography[0];
expect(castingEntry).toBeDefined();
// The casting entry should be resolved with casting-summary schema
expect(castingEntry.schemaId).toBe('casting-summary');
// Should not contain a reference to keanu (the parent)
expect(castingEntry.id).not.toBe('keanu');
}
// Test from Matrix's perspective
const matrixViews = node.lossless.view(['matrix']);
const matrixView = matrixViews['matrix'];
const nestedMatrixView = schemaRegistry.applySchemaWithNesting(
matrixView,
'movie',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedMatrixView.id).toBe('matrix');
expect(nestedMatrixView.nestedObjects.cast).toBeDefined();
});
it('should handle deltas with mixed scalar and reference values correctly', async () => {
// Create a simpler schema for testing mixed values
const personSchema = SchemaBuilder
.create('person')
.name('Person')
.property('name', PrimitiveSchemas.requiredString())
.property('relationships', ReferenceSchemas.to('relationship-summary', 3))
.required('name')
.build();
const relationshipSummarySchema = SchemaBuilder
.create('relationship-summary')
.name('Relationship Summary')
.property('partner_name', PrimitiveSchemas.string())
.property('type', PrimitiveSchemas.string())
.property('since', PrimitiveSchemas.string())
.property('intensity', PrimitiveSchemas.number())
.additionalProperties(false)
.build();
schemaRegistry.register(personSchema);
schemaRegistry.register(relationshipSummarySchema);
const personCollection = new TypedCollectionImpl<{ name: string }>('people', personSchema, schemaRegistry);
personCollection.rhizomeConnect(node);
// Create people
await personCollection.put('alice', { name: 'Alice' });
await personCollection.put('bob', { name: 'Bob' });
// Create a relationship delta with one entity reference and multiple scalars
const relationshipDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'people', target: 'alice', targetContext: 'relationships' },
{ localContext: 'partner', target: 'bob' }, // Entity reference
{ localContext: 'type', target: 'friendship' }, // Scalar
{ localContext: 'since', target: '2020-01-15' }, // Scalar
{ localContext: 'intensity', target: 8 } // Scalar number
]
});
node.lossless.ingestDelta(relationshipDelta);
// Test from Alice's perspective
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
const nestedAliceView = schemaRegistry.applySchemaWithNesting(
aliceView,
'person',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedAliceView.id).toBe('alice');
expect(nestedAliceView.nestedObjects.relationships).toBeDefined();
if (nestedAliceView.nestedObjects.relationships) {
expect(nestedAliceView.nestedObjects.relationships.length).toBe(1);
const relationshipEntry = nestedAliceView.nestedObjects.relationships[0];
// Should be resolved with relationship-summary schema
expect(relationshipEntry.schemaId).toBe('relationship-summary');
// Should contain scalar values and reference to bob but not alice
expect(relationshipEntry.id).not.toBe('alice');
// The relationship should contain the scalar values from the delta
// Note: The exact structure depends on how the resolution logic handles mixed values
}
});
it('should correctly identify multiple entity references within a single delta', async () => {
// Test a scenario with multiple entity references that should all be resolved
const projectSchema = SchemaBuilder
.create('project')
.name('Project')
.property('name', PrimitiveSchemas.requiredString())
.property('collaborations', ReferenceSchemas.to('collaboration-summary', 3))
.required('name')
.build();
const collaborationSummarySchema = SchemaBuilder
.create('collaboration-summary')
.name('Collaboration Summary')
.property('project_name', PrimitiveSchemas.string())
.property('developer_name', PrimitiveSchemas.string())
.property('designer_name', PrimitiveSchemas.string())
.property('budget', PrimitiveSchemas.number())
.additionalProperties(false)
.build();
schemaRegistry.register(projectSchema);
schemaRegistry.register(collaborationSummarySchema);
const projectCollection = new TypedCollectionImpl<{ name: string }>('projects', projectSchema, schemaRegistry);
const developerCollection = new TypedCollectionImpl<{ name: string }>('developers', projectSchema, schemaRegistry);
const designerCollection = new TypedCollectionImpl<{ name: string }>('designers', projectSchema, schemaRegistry);
projectCollection.rhizomeConnect(node);
developerCollection.rhizomeConnect(node);
designerCollection.rhizomeConnect(node);
// Create entities
await projectCollection.put('website', { name: 'Company Website' });
await developerCollection.put('alice', { name: 'Alice Developer' });
await designerCollection.put('bob', { name: 'Bob Designer' });
// Create a collaboration delta with multiple entity references
const collaborationDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'projects', target: 'website', targetContext: 'collaborations' },
{ localContext: 'developer', target: 'alice' }, // Entity reference
{ localContext: 'designer', target: 'bob' }, // Entity reference
{ localContext: 'budget', target: 50000 }, // Scalar
{ localContext: 'deadline', target: '2024-06-01' } // Scalar
]
});
node.lossless.ingestDelta(collaborationDelta);
// Test from project's perspective
const projectViews = node.lossless.view(['website']);
const projectView = projectViews['website'];
const nestedProjectView = schemaRegistry.applySchemaWithNesting(
projectView,
'project',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedProjectView.id).toBe('website');
expect(nestedProjectView.nestedObjects.collaborations).toBeDefined();
if (nestedProjectView.nestedObjects.collaborations) {
// Verify we get exactly 1 composite object (not 2 separate objects)
expect(nestedProjectView.nestedObjects.collaborations.length).toBe(1);
const collaboration = nestedProjectView.nestedObjects.collaborations[0];
expect(collaboration.schemaId).toBe('collaboration-summary');
expect(collaboration.id).toMatch(/^composite-/); // Should be a synthetic composite ID
// Verify the composite object contains scalar properties
expect(collaboration.properties.budget).toBe(50000);
expect(collaboration.properties.deadline).toBe('2024-06-01');
// Verify the composite object contains nested entity references
expect(collaboration.nestedObjects.developer).toBeDefined();
expect(collaboration.nestedObjects.designer).toBeDefined();
// The nested entities should be resolved as arrays with single objects
const developers = collaboration.nestedObjects.developer as SchemaAppliedViewWithNesting[];
const designers = collaboration.nestedObjects.designer as SchemaAppliedViewWithNesting[];
// Each should have exactly one entity
expect(developers.length).toBe(1);
expect(designers.length).toBe(1);
// Verify each entity reference resolves to the correct entity
expect(developers[0].id).toBe('alice');
expect(developers[0].schemaId).toBe('collaboration-summary');
expect(designers[0].id).toBe('bob');
expect(designers[0].schemaId).toBe('collaboration-summary');
}
});
});
});

492
__tests__/negation.ts Normal file
View File

@ -0,0 +1,492 @@
import * as _RhizomeImports from "../src";
Review

This line appears to do nothing
import { Delta } from '../src/core';
import { NegationHelper } from '../src/features';
import { RhizomeNode } from '../src/node';
import { Lossless } from '../src/views';
describe('Negation System', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Negation Helper', () => {
it('should create negation deltas correctly', () => {
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'name', target: 'entity1', targetContext: 'name' },
{ localContext: 'value', target: 'Alice' }
]
});
const negationDelta = NegationHelper.createNegation(
originalDelta.id,
'moderator',
'host1'
);
expect(negationDelta.isNegation).toBe(true);
expect(negationDelta.negatedDeltaId).toBe(originalDelta.id);
expect(negationDelta.creator).toBe('moderator');
expect(negationDelta.pointers).toHaveLength(1);
expect(negationDelta.pointers[0]).toEqual({
localContext: 'negates',
target: originalDelta.id,
targetContext: 'negated_by'
});
});
it('should identify negation deltas', () => {
const regularDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
const negationDelta = NegationHelper.createNegation(
'delta-to-negate',
'moderator',
'host1'
);
expect(NegationHelper.isNegationDelta(regularDelta)).toBe(false);
expect(NegationHelper.isNegationDelta(negationDelta)).toBe(true);
});
it('should extract negated delta ID', () => {
const targetDeltaId = 'target-delta-123';
const negationDelta = NegationHelper.createNegation(
targetDeltaId,
'moderator',
'host1'
);
const extractedId = NegationHelper.getNegatedDeltaId(negationDelta);
expect(extractedId).toBe(targetDeltaId);
const regularDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
expect(NegationHelper.getNegatedDeltaId(regularDelta)).toBeNull();
});
it('should find negations for specific deltas', () => {
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
const delta2 = new Delta({
creator: 'user2',
host: 'host1',
pointers: [{ localContext: 'age', target: 'entity1', targetContext: 'age' }]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
const negation2 = NegationHelper.createNegation(delta1.id, 'mod2', 'host1');
const negation3 = NegationHelper.createNegation(delta2.id, 'mod1', 'host1');
const allDeltas = [delta1, delta2, negation1, negation2, negation3];
const negationsForDelta1 = NegationHelper.findNegationsFor(delta1.id, allDeltas);
expect(negationsForDelta1).toHaveLength(2);
expect(negationsForDelta1.map(d => d.id)).toContain(negation1.id);
expect(negationsForDelta1.map(d => d.id)).toContain(negation2.id);
const negationsForDelta2 = NegationHelper.findNegationsFor(delta2.id, allDeltas);
expect(negationsForDelta2).toHaveLength(1);
expect(negationsForDelta2[0].id).toBe(negation3.id);
});
it('should check if deltas are negated', () => {
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
const delta2 = new Delta({
creator: 'user2',
host: 'host1',
pointers: [{ localContext: 'age', target: 'entity1', targetContext: 'age' }]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
const allDeltas = [delta1, delta2, negation1];
expect(NegationHelper.isDeltaNegated(delta1.id, allDeltas)).toBe(true);
expect(NegationHelper.isDeltaNegated(delta2.id, allDeltas)).toBe(false);
});
it('should filter out negated deltas', () => {
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
const delta2 = new Delta({
creator: 'user2',
host: 'host1',
pointers: [{ localContext: 'age', target: 'entity1', targetContext: 'age' }]
});
const delta3 = new Delta({
creator: 'user3',
host: 'host1',
pointers: [{ localContext: 'email', target: 'entity1', targetContext: 'email' }]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
const negation2 = NegationHelper.createNegation(delta2.id, 'mod2', 'host1');
const allDeltas = [delta1, delta2, delta3, negation1, negation2];
const filtered = NegationHelper.filterNegatedDeltas(allDeltas);
// Should only include delta3 (delta1 and delta2 are negated, negations themselves are filtered)
expect(filtered).toHaveLength(1);
expect(filtered[0].id).toBe(delta3.id);
});
it('should provide negation statistics', () => {
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [{ localContext: 'name', target: 'entity1', targetContext: 'name' }]
});
const delta2 = new Delta({
creator: 'user2',
host: 'host1',
pointers: [{ localContext: 'age', target: 'entity1', targetContext: 'age' }]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
const allDeltas = [delta1, delta2, negation1];
const stats = NegationHelper.getNegationStats(allDeltas);
expect(stats.totalDeltas).toBe(3);
expect(stats.negationDeltas).toBe(1);
expect(stats.negatedDeltas).toBe(1);
expect(stats.effectiveDeltas).toBe(1); // only delta2 is effective
expect(stats.negatedDeltaIds).toContain(delta1.id);
expect(stats.negationMap.get(delta1.id)).toContain(negation1.id);
});
it('should apply negations chronologically', () => {
const baseTime = Date.now();
// Create deltas with specific timestamps
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
timeCreated: baseTime,
pointers: [{ localContext: 'status', target: 'doc1', targetContext: 'status' }]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
negation1.timeCreated = baseTime + 1000; // 1 second later
const delta2 = new Delta({
creator: 'user1',
host: 'host1',
timeCreated: baseTime + 2000, // 2 seconds later
pointers: [{ localContext: 'status', target: 'doc1', targetContext: 'status' }]
});
const negation2 = NegationHelper.createNegation(delta2.id, 'mod1', 'host1');
negation2.timeCreated = baseTime + 3000; // 3 seconds later
const allDeltas = [delta1, negation1, delta2, negation2];
const filtered = NegationHelper.applyNegationsChronologically(allDeltas);
// Both deltas should be negated
expect(filtered).toHaveLength(0);
});
});
describe('Lossless View Integration', () => {
it('should filter negated deltas in lossless views', () => {
// Create original delta
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'name', target: 'user123', targetContext: 'name' },
{ localContext: 'value', target: 'Alice' }
]
});
// Create negation delta
const negationDelta = NegationHelper.createNegation(
originalDelta.id,
'moderator',
'host1'
);
// Create another non-negated delta
const nonNegatedDelta = new Delta({
creator: 'user2',
host: 'host1',
pointers: [
{ localContext: 'age', target: 'user123', targetContext: 'age' },
{ localContext: 'value', target: 25 }
]
});
// Ingest all deltas
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negationDelta);
lossless.ingestDelta(nonNegatedDelta);
// Get view - should only show non-negated delta
const view = lossless.view(['user123']);
expect(view.user123).toBeDefined();
// Should only have age property (name was negated)
expect(view.user123.propertyDeltas.age).toHaveLength(1);
expect(view.user123.propertyDeltas.name).toBeUndefined();
});
it('should handle multiple negations of the same delta', () => {
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'content', target: 'post1', targetContext: 'content' },
{ localContext: 'value', target: 'Original content' }
]
});
const negation1 = NegationHelper.createNegation(originalDelta.id, 'mod1', 'host1');
const negation2 = NegationHelper.createNegation(originalDelta.id, 'mod2', 'host1');
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negation1);
lossless.ingestDelta(negation2);
const view = lossless.view(['post1']);
// Original delta should be negated (not visible)
expect(view.post1).toBeUndefined();
});
it('should provide negation statistics for entities', () => {
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'title', target: 'article1', targetContext: 'title' },
{ localContext: 'value', target: 'Original Title' }
]
});
const delta2 = new Delta({
creator: 'user2',
host: 'host1',
pointers: [
{ localContext: 'content', target: 'article1', targetContext: 'content' },
{ localContext: 'value', target: 'Article content' }
]
});
const negation1 = NegationHelper.createNegation(delta1.id, 'mod1', 'host1');
lossless.ingestDelta(delta1);
lossless.ingestDelta(delta2);
lossless.ingestDelta(negation1);
const stats = lossless.getNegationStats('article1');
expect(stats.totalDeltas).toBe(3);
expect(stats.negationDeltas).toBe(1);
expect(stats.negatedDeltas).toBe(1);
expect(stats.effectiveDeltas).toBe(1);
expect(stats.negationsByProperty.title.negated).toBe(1);
expect(stats.negationsByProperty.content.negated).toBe(0);
});
it('should retrieve negation deltas for entities', () => {
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'status', target: 'task1', targetContext: 'status' },
{ localContext: 'value', target: 'pending' }
]
});
const negationDelta = NegationHelper.createNegation(
originalDelta.id,
'admin',
'host1'
);
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negationDelta);
const negations = lossless.getNegationDeltas('task1');
expect(negations).toHaveLength(1);
expect(negations[0].id).toBe(negationDelta.id);
expect(negations[0].creator).toBe('admin');
});
it('should handle negation in transactions', () => {
const transactionId = 'tx-negation';
// Create transaction declaration
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Create original delta in transaction
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'comment', target: 'post1', targetContext: 'comments' },
{ localContext: 'text', target: 'Inappropriate comment' }
]
});
// Create negation delta in same transaction
const negationDelta = NegationHelper.createNegation(originalDelta.id, 'moderator', 'host1');
negationDelta.pointers.unshift({
localContext: '_transaction',
target: transactionId,
targetContext: 'deltas'
});
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negationDelta);
// Transaction should complete, but original delta should be negated
const view = lossless.view(['post1']);
expect(view.post1).toBeUndefined(); // No visible deltas
});
it('should handle chronological negation scenarios', () => {
const baseTime = Date.now();
// User posts content
const postDelta = new Delta({
creator: 'user1',
host: 'host1',
timeCreated: baseTime,
pointers: [
{ localContext: 'content', target: 'post1', targetContext: 'content' },
{ localContext: 'value', target: 'Original post' }
]
});
// Moderator negates it
const negationDelta = NegationHelper.createNegation(postDelta.id, 'moderator', 'host1');
negationDelta.timeCreated = baseTime + 1000;
// User edits content (after negation)
const editDelta = new Delta({
creator: 'user1',
host: 'host1',
timeCreated: baseTime + 2000,
pointers: [
{ localContext: 'content', target: 'post1', targetContext: 'content' },
{ localContext: 'value', target: 'Edited post' }
]
});
lossless.ingestDelta(postDelta);
lossless.ingestDelta(negationDelta);
lossless.ingestDelta(editDelta);
const view = lossless.view(['post1']);
// Should show edited content (edit happened after negation)
expect(view.post1).toBeDefined();
expect(view.post1.propertyDeltas.content).toHaveLength(1);
// The visible delta should be the edit delta
const visibleDelta = view.post1.propertyDeltas.content[0];
expect(visibleDelta.id).toBe(editDelta.id);
});
});
describe('Edge Cases', () => {
it('should handle negation of non-existent deltas', () => {
const negationDelta = NegationHelper.createNegation(
'non-existent-delta-id',
'moderator',
'host1'
);
lossless.ingestDelta(negationDelta);
// Should not crash and stats should reflect the orphaned negation
const stats = lossless.getNegationStats('entity1');
expect(stats.negationDeltas).toBe(0); // No negations for this entity
});
it('should handle multiple negations and un-negations', () => {
Review

> and un-negations

I'm not seeing anything implementing or testing the negation of a negation. A hypothetical sketch of such a test follows this test case.
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'visible', target: 'item1', targetContext: 'visible' },
{ localContext: 'value', target: true }
]
});
const negation1 = NegationHelper.createNegation(originalDelta.id, 'mod1', 'host1');
const negation2 = NegationHelper.createNegation(originalDelta.id, 'mod2', 'host1');
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negation1);
lossless.ingestDelta(negation2);
// Delta should be thoroughly negated
const view = lossless.view(['item1']);
expect(view.item1).toBeUndefined();
const stats = lossless.getNegationStats('item1');
expect(stats.negatedDeltas).toBe(1);
expect(stats.negationDeltas).toBe(2);
});
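// Review follow-up: a hypothetical sketch of the missing "un-negation" case, i.e.
// negating a negation delta. It assumes that negating a negation restores the
// original delta's visibility; the current implementation does not confirm this
// behavior, so the sketch is marked as skipped rather than asserted.
it.skip('should restore a delta when its negation is itself negated (sketch)', () => {
const originalDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'visible', target: 'item1', targetContext: 'visible' },
{ localContext: 'value', target: true }
]
});
const negation = NegationHelper.createNegation(originalDelta.id, 'mod1', 'host1');
// Negating the negation itself ("un-negation")
const unNegation = NegationHelper.createNegation(negation.id, 'admin', 'host1');
lossless.ingestDelta(originalDelta);
lossless.ingestDelta(negation);
lossless.ingestDelta(unNegation);
// If un-negation is honored, the original delta becomes visible again
const view = lossless.view(['item1']);
expect(view.item1).toBeDefined();
expect(view.item1.propertyDeltas.visible).toHaveLength(1);
});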
it('should handle self-referential entities in negations', () => {
// Create a delta that references itself
const selfRefDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'parent', target: 'node1', targetContext: 'parent' },
{ localContext: 'child', target: 'node1' } // Self-reference
]
});
const negationDelta = NegationHelper.createNegation(selfRefDelta.id, 'admin', 'host1');
lossless.ingestDelta(selfRefDelta);
lossless.ingestDelta(negationDelta);
const view = lossless.view(['node1']);
expect(view.node1).toBeUndefined(); // Should be negated
});
});
});

View File

@ -0,0 +1,358 @@
/**
* Performance tests for nested object resolution with large object graphs
*
* Tests performance characteristics of:
* - Large networks of interconnected entities
* - Deep nesting chains
* - Wide arrays of references
* - Circular reference handling at scale
*/
import { RhizomeNode } from '../src/node';
import { Delta } from '../src/core';
import { DefaultSchemaRegistry } from '../src/schema';
import { SchemaBuilder, PrimitiveSchemas, ReferenceSchemas, ArraySchemas } from '../src/schema';
import { TypedCollectionImpl } from '../src/collections';
describe('Nested Object Resolution Performance', () => {
let node: RhizomeNode;
let schemaRegistry: DefaultSchemaRegistry;
beforeEach(() => {
node = new RhizomeNode();
schemaRegistry = new DefaultSchemaRegistry();
});
describe('Large Network Performance', () => {
it('should handle large networks of interconnected users efficiently', async () => {
// Create a schema for users with multiple relationship types
const networkUserSchema = SchemaBuilder
.create('network-user')
.name('Network User')
.property('name', PrimitiveSchemas.requiredString())
.property('friends', ArraySchemas.of(ReferenceSchemas.to('network-user-summary', 2)))
.property('followers', ArraySchemas.of(ReferenceSchemas.to('network-user-summary', 2)))
.property('mentor', ReferenceSchemas.to('network-user-summary', 2))
.required('name')
.build();
const networkUserSummarySchema = SchemaBuilder
.create('network-user-summary')
.name('Network User Summary')
.property('name', PrimitiveSchemas.requiredString())
.required('name')
.additionalProperties(false)
.build();
schemaRegistry.register(networkUserSchema);
schemaRegistry.register(networkUserSummarySchema);
const userCollection = new TypedCollectionImpl<{
name: string;
friends?: string[];
followers?: string[];
mentor?: string;
}>('users', networkUserSchema, schemaRegistry);
userCollection.rhizomeConnect(node);
const startSetup = performance.now();
// Create 100 users
const userCount = 100;
const userIds: string[] = [];
for (let i = 0; i < userCount; i++) {
const userId = `user${i}`;
userIds.push(userId);
await userCollection.put(userId, { name: `User ${i}` });
}
// Create a network where each user has 5-10 friends, 10-20 followers, and 1 mentor
for (let i = 0; i < userCount; i++) {
const userId = userIds[i];
// Add friends (5-10 random connections)
const friendCount = 5 + Math.floor(Math.random() * 6);
for (let j = 0; j < friendCount; j++) {
const friendIndex = Math.floor(Math.random() * userCount);
if (friendIndex !== i) {
const friendId = userIds[friendIndex];
const friendshipDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: userId, targetContext: 'friends' },
{ localContext: 'friends', target: friendId }
]
});
node.lossless.ingestDelta(friendshipDelta);
}
}
// Add followers (10-20 random connections)
const followerCount = 10 + Math.floor(Math.random() * 11);
for (let j = 0; j < followerCount; j++) {
const followerIndex = Math.floor(Math.random() * userCount);
if (followerIndex !== i) {
const followerId = userIds[followerIndex];
const followDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: userId, targetContext: 'followers' },
{ localContext: 'followers', target: followerId }
]
});
node.lossless.ingestDelta(followDelta);
}
}
// Add mentor (one per user, creating a hierarchy)
if (i > 0) {
const mentorIndex = Math.floor(i / 2); // Create a tree-like mentor structure
const mentorId = userIds[mentorIndex];
const mentorshipDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: userId, targetContext: 'mentor' },
{ localContext: 'mentor', target: mentorId }
]
});
node.lossless.ingestDelta(mentorshipDelta);
}
}
const setupTime = performance.now() - startSetup;
console.log(`Setup time for ${userCount} users with relationships: ${setupTime.toFixed(2)}ms`);
// Test resolution performance for a user with many connections
const testUserId = userIds[50]; // Pick a user in the middle
const userViews = node.lossless.view([testUserId]);
const userView = userViews[testUserId];
const startResolution = performance.now();
const nestedView = schemaRegistry.applySchemaWithNesting(
userView,
'network-user',
node.lossless,
{ maxDepth: 2 }
);
const resolutionTime = performance.now() - startResolution;
console.log(`Resolution time for user with many connections: ${resolutionTime.toFixed(2)}ms`);
// Verify the resolution worked
expect(nestedView.id).toBe(testUserId);
expect(nestedView.schemaId).toBe('network-user');
// Performance assertions (adjust thresholds based on acceptable performance)
expect(setupTime).toBeLessThan(5000); // Setup should take less than 5 seconds
expect(resolutionTime).toBeLessThan(1000); // Resolution should take less than 1 second
// Verify we got some nested objects
const totalNestedObjects = Object.values(nestedView.nestedObjects).reduce(
(total, arr) => total + (arr?.length || 0), 0
);
console.log('Total nested objects resolved:', totalNestedObjects);
// The test user should have friends, followers, and possibly a mentor
expect(Object.keys(nestedView.nestedObjects).length).toBeGreaterThan(0);
});
it('should handle deep nesting chains efficiently', async () => {
// Create a simple schema for chain testing
const chainUserSchema = SchemaBuilder
.create('chain-user')
.name('Chain User')
.property('name', PrimitiveSchemas.requiredString())
.property('next', ReferenceSchemas.to('chain-user-summary', 3))
.required('name')
.build();
const chainUserSummarySchema = SchemaBuilder
.create('chain-user-summary')
.name('Chain User Summary')
.property('name', PrimitiveSchemas.requiredString())
.required('name')
.additionalProperties(false)
.build();
schemaRegistry.register(chainUserSchema);
schemaRegistry.register(chainUserSummarySchema);
const userCollection = new TypedCollectionImpl<{
name: string;
next?: string;
}>('users', chainUserSchema, schemaRegistry);
userCollection.rhizomeConnect(node);
const startSetup = performance.now();
// Create a chain of 50 users
const chainLength = 50;
const userIds: string[] = [];
for (let i = 0; i < chainLength; i++) {
const userId = `chain-user${i}`;
userIds.push(userId);
await userCollection.put(userId, { name: `Chain User ${i}` });
}
// Link them in a chain
for (let i = 0; i < chainLength - 1; i++) {
const currentId = userIds[i];
const nextId = userIds[i + 1];
const linkDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: currentId, targetContext: 'next' },
{ localContext: 'next', target: nextId }
]
});
node.lossless.ingestDelta(linkDelta);
}
const setupTime = performance.now() - startSetup;
console.log(`Setup time for chain of ${chainLength} users: ${setupTime.toFixed(2)}ms`);
// Test resolution from the start of the chain
const firstUserId = userIds[0];
const userViews = node.lossless.view([firstUserId]);
const userView = userViews[firstUserId];
const startResolution = performance.now();
const nestedView = schemaRegistry.applySchemaWithNesting(
userView,
'chain-user',
node.lossless,
{ maxDepth: 5 } // Should resolve 5 levels deep
);
const resolutionTime = performance.now() - startResolution;
console.log(`Resolution time for deep chain (maxDepth=5): ${resolutionTime.toFixed(2)}ms`);
// Verify the resolution worked and respected depth limits
expect(nestedView.id).toBe(firstUserId);
expect(nestedView.schemaId).toBe('chain-user');
// Performance assertions
expect(setupTime).toBeLessThan(2000); // Setup should take less than 2 seconds
expect(resolutionTime).toBeLessThan(500); // Resolution should take less than 500ms
// Verify depth was respected
let currentView = nestedView;
let depth = 0;
while (currentView.nestedObjects.next && currentView.nestedObjects.next.length > 0) {
currentView = currentView.nestedObjects.next[0];
depth++;
if (depth >= 5) break; // Prevent infinite loop
Review

This condition means the subsequent test of depth will always pass, regardless of actual depth. A suggested rework is sketched after this test.
}
expect(depth).toBeLessThanOrEqual(5);
console.log(`Actual resolved depth: ${depth}`);
});
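// Review follow-up: a hypothetical rework of the depth walk above. Because the
// existing loop breaks once depth >= 5, the subsequent `toBeLessThanOrEqual(5)`
// assertion can never fail. Walking with a safety cap far above the expected
// maximum keeps the cycle guard while letting the depth assertion be meaningful.
// The helper name and the safetyCap value are illustrative, not part of the codebase.
type NextChainView = { nestedObjects: { next?: NextChainView[] } };
function walkNextChainDepth(view: NextChainView, safetyCap = 50): number {
let depth = 0;
let cursor = view;
while (cursor.nestedObjects.next && cursor.nestedObjects.next.length > 0 && depth < safetyCap) {
cursor = cursor.nestedObjects.next[0];
depth++;
}
return depth;
}
// Usage in the test above would then be:
//   const depth = walkNextChainDepth(nestedView);
//   expect(depth).toBeLessThan(50);          // the safety cap was never the reason we stopped
//   expect(depth).toBeLessThanOrEqual(5);    // depth limiting actually applied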
it('should handle circular references in large graphs without performance degradation', async () => {
const circularUserSchema = SchemaBuilder
.create('circular-user')
.name('Circular User')
.property('name', PrimitiveSchemas.requiredString())
.property('connections', ArraySchemas.of(ReferenceSchemas.to('circular-user-summary', 3)))
.required('name')
.build();
const circularUserSummarySchema = SchemaBuilder
.create('circular-user-summary')
.name('Circular User Summary')
.property('name', PrimitiveSchemas.requiredString())
.required('name')
.additionalProperties(false)
.build();
schemaRegistry.register(circularUserSchema);
schemaRegistry.register(circularUserSummarySchema);
const userCollection = new TypedCollectionImpl<{
name: string;
connections?: string[];
}>('users', circularUserSchema, schemaRegistry);
userCollection.rhizomeConnect(node);
const startSetup = performance.now();
// Create 20 users
const userCount = 20;
const userIds: string[] = [];
for (let i = 0; i < userCount; i++) {
const userId = `circular-user${i}`;
userIds.push(userId);
await userCollection.put(userId, { name: `Circular User ${i}` });
}
// Create many circular connections - each user connects to 3 others
for (let i = 0; i < userCount; i++) {
const userId = userIds[i];
// Connect to next 3 users (wrapping around)
for (let j = 1; j <= 3; j++) {
const connectedIndex = (i + j) % userCount;
const connectedId = userIds[connectedIndex];
const connectionDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: userId, targetContext: 'connections' },
{ localContext: 'connections', target: connectedId }
]
});
node.lossless.ingestDelta(connectionDelta);
}
}
const setupTime = performance.now() - startSetup;
console.log(`Setup time for circular graph with ${userCount} users: ${setupTime.toFixed(2)}ms`);
// Test resolution performance with circular references
const testUserId = userIds[0];
const userViews = node.lossless.view([testUserId]);
const userView = userViews[testUserId];
const startResolution = performance.now();
const nestedView = schemaRegistry.applySchemaWithNesting(
userView,
'circular-user',
node.lossless,
{ maxDepth: 3 }
);
const resolutionTime = performance.now() - startResolution;
console.log(`Resolution time for circular graph (maxDepth=3): ${resolutionTime.toFixed(2)}ms`);
// Verify the resolution completed without hanging
expect(nestedView.id).toBe(testUserId);
expect(nestedView.schemaId).toBe('circular-user');
// Performance assertions - should handle circular references efficiently
expect(setupTime).toBeLessThan(2000);
expect(resolutionTime).toBeLessThan(1000); // Should complete in reasonable time despite cycles
// Verify we got some nested objects but didn't get stuck in infinite loops
expect(nestedView.nestedObjects.connections).toBeDefined();
if (nestedView.nestedObjects.connections) {
expect(nestedView.nestedObjects.connections.length).toBeGreaterThan(0);
expect(nestedView.nestedObjects.connections.length).toBeLessThanOrEqual(3);
}
console.log(`Connections resolved: ${nestedView.nestedObjects.connections?.length || 0}`);
});
});
});

View File

@ -0,0 +1,443 @@
/**
* Test suite for nested object resolution with schema-controlled depth limiting
* and circular reference detection.
*
* Tests the implementation of:
* - Schema-controlled depth limiting
* - Circular reference detection and prevention
* - "Summary" schema type for references
* - Deep nesting scenarios
*/
import { RhizomeNode } from '../src/node';
import { Delta } from '../src/core';
import { DefaultSchemaRegistry } from '../src/schema';
import { CommonSchemas, SchemaBuilder, PrimitiveSchemas, ReferenceSchemas } from '../src/schema';
import { TypedCollectionImpl } from '../src/collections';
describe('Nested Object Resolution', () => {
let node: RhizomeNode;
let schemaRegistry: DefaultSchemaRegistry;
beforeEach(() => {
node = new RhizomeNode();
schemaRegistry = new DefaultSchemaRegistry();
// Register the common schemas
schemaRegistry.register(CommonSchemas.User());
schemaRegistry.register(CommonSchemas.UserSummary());
schemaRegistry.register(CommonSchemas.Document());
});
afterEach(() => {
// No cleanup needed for nodes that don't call start()
});
describe('Basic Reference Resolution', () => {
it('should resolve single-level user references with UserSummary schema', async () => {
const userCollection = new TypedCollectionImpl<{
name: string;
email?: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
// Create test users
await userCollection.put('alice', {
name: 'Alice',
email: 'alice@test.com'
});
await userCollection.put('bob', {
name: 'Bob',
email: 'bob@test.com'
});
// Create friendship relationship
const friendshipDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'bob' }
]
});
node.lossless.ingestDelta(friendshipDelta);
// Get Alice's lossless view
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
expect(aliceView).toBeDefined();
expect(aliceView.propertyDeltas.friends).toBeDefined();
expect(aliceView.propertyDeltas.friends.length).toBeGreaterThan(0);
// Apply schema with nesting
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedView.id).toBe('alice');
expect(nestedView.schemaId).toBe('user');
expect(nestedView.metadata?.depth).toBe(0);
expect(nestedView.metadata?.truncated).toBe(false);
// Check if friends are resolved as nested objects
if (nestedView.nestedObjects.friends) {
expect(nestedView.nestedObjects.friends.length).toBe(1);
const bobSummary = nestedView.nestedObjects.friends[0];
expect(bobSummary.id).toBe('bob');
expect(bobSummary.schemaId).toBe('user-summary');
expect(bobSummary.metadata?.depth).toBe(1);
}
});
it('should handle missing references gracefully', async () => {
const userCollection = new TypedCollectionImpl<{
name: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
// Create user with reference to non-existent friend
await userCollection.put('alice', { name: 'Alice' });
const friendshipDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'nonexistent' }
]
});
node.lossless.ingestDelta(friendshipDelta);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 2 }
);
// Should not crash and should handle missing reference gracefully
expect(nestedView.id).toBe('alice');
// The friends array might be empty or have no resolved objects
if (nestedView.nestedObjects.friends) {
expect(Array.isArray(nestedView.nestedObjects.friends)).toBe(true);
}
});
});
describe('Depth Limiting', () => {
it('should respect maxDepth parameter and truncate deep nesting', async () => {
// Create a custom schema with deeper nesting for testing
const deepUserSchema = SchemaBuilder
.create('deep-user')
.name('Deep User')
.property('name', PrimitiveSchemas.requiredString())
.property('mentor', ReferenceSchemas.to('deep-user', 2)) // Self-reference with depth 2
.required('name')
.build();
schemaRegistry.register(deepUserSchema);
const userCollection = new TypedCollectionImpl<{
name: string;
mentor?: string;
}>('deep-users', deepUserSchema, schemaRegistry);
userCollection.rhizomeConnect(node);
// Create a chain: alice -> bob -> charlie
await userCollection.put('alice', { name: 'Alice' });
await userCollection.put('bob', { name: 'Bob' });
await userCollection.put('charlie', { name: 'Charlie' });
// Alice's mentor is Bob
const mentorshipDelta1 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'deep-users', target: 'alice', targetContext: 'mentor' },
{ localContext: 'mentor', target: 'bob' }
]
});
node.lossless.ingestDelta(mentorshipDelta1);
// Bob's mentor is Charlie
const mentorshipDelta2 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'deep-users', target: 'bob', targetContext: 'mentor' },
{ localContext: 'mentor', target: 'charlie' }
]
});
node.lossless.ingestDelta(mentorshipDelta2);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
// Test with maxDepth = 1 (should only resolve Alice and Bob)
const shallowView = schemaRegistry.applySchemaWithNesting(
aliceView,
'deep-user',
node.lossless,
{ maxDepth: 1 }
);
expect(shallowView.id).toBe('alice');
expect(shallowView.metadata?.depth).toBe(0);
expect(shallowView.metadata?.truncated).toBe(false);
if (shallowView.nestedObjects.mentor) {
expect(shallowView.nestedObjects.mentor.length).toBe(1);
const bobView = shallowView.nestedObjects.mentor[0];
expect(bobView.id).toBe('bob');
expect(bobView.metadata?.depth).toBe(1);
expect(bobView.metadata?.truncated).toBe(true); // Should be truncated at depth 1
// Bob's mentor should not be resolved due to depth limit
expect(bobView.nestedObjects.mentor || []).toHaveLength(0);
}
// Test with maxDepth = 2 (should resolve Alice, Bob, and Charlie)
const deepView = schemaRegistry.applySchemaWithNesting(
aliceView,
'deep-user',
node.lossless,
{ maxDepth: 2 }
);
if (deepView.nestedObjects.mentor) {
const bobView = deepView.nestedObjects.mentor[0];
expect(bobView.metadata?.truncated).toBe(false);
if (bobView.nestedObjects.mentor) {
expect(bobView.nestedObjects.mentor.length).toBe(1);
const charlieView = bobView.nestedObjects.mentor[0];
expect(charlieView.id).toBe('charlie');
expect(charlieView.metadata?.depth).toBe(2);
expect(charlieView.metadata?.truncated).toBe(true); // Truncated at max depth
}
}
});
});
describe('Circular Reference Prevention', () => {
it('should detect and prevent circular references', async () => {
const userCollection = new TypedCollectionImpl<{
name: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
// Create users
await userCollection.put('alice', { name: 'Alice' });
await userCollection.put('bob', { name: 'Bob' });
// Create circular friendship: Alice -> Bob -> Alice
const friendship1 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'bob' }
]
});
node.lossless.ingestDelta(friendship1);
const friendship2 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'bob', targetContext: 'friends' },
{ localContext: 'friends', target: 'alice' }
]
});
node.lossless.ingestDelta(friendship2);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
// Should handle circular reference without infinite recursion
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 3 }
);
expect(nestedView.id).toBe('alice');
// The resolution should complete without hanging or crashing
// The exact behavior may vary, but it should not cause infinite recursion
expect(nestedView.metadata?.truncated).toBeDefined();
});
it('should handle self-references correctly', async () => {
const userCollection = new TypedCollectionImpl<{
name: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
await userCollection.put('alice', { name: 'Alice' });
// Alice is friends with herself
const selfFriendship = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'alice' }
]
});
node.lossless.ingestDelta(selfFriendship);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedView.id).toBe('alice');
// Should detect the self-reference and handle it appropriately
});
});
describe('Array References', () => {
it('should resolve arrays of references correctly', async () => {
const userCollection = new TypedCollectionImpl<{
name: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
// Create multiple users
await userCollection.put('alice', { name: 'Alice' });
await userCollection.put('bob', { name: 'Bob' });
await userCollection.put('charlie', { name: 'Charlie' });
// Alice has multiple friends
const friendship1 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'bob' }
]
});
node.lossless.ingestDelta(friendship1);
const friendship2 = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'charlie' }
]
});
node.lossless.ingestDelta(friendship2);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 2 }
);
expect(nestedView.id).toBe('alice');
if (nestedView.nestedObjects.friends) {
expect(nestedView.nestedObjects.friends.length).toBe(2);
const friendIds = nestedView.nestedObjects.friends.map((friend: { id: string }) => friend.id);
expect(friendIds).toContain('bob');
expect(friendIds).toContain('charlie');
// All friends should use the user-summary schema
nestedView.nestedObjects.friends.forEach((friend: { schemaId: string; metadata?: { depth: number } }) => {
expect(friend.schemaId).toBe('user-summary');
expect(friend.metadata?.depth).toBe(1);
});
}
});
});
describe('Summary Schema Pattern', () => {
it('should use Summary schema to break infinite recursion', async () => {
// The User schema references user-summary for friends
// This tests the pattern mentioned in the spec
const userCollection = new TypedCollectionImpl<{
name: string;
email?: string;
friends?: string[];
}>('users', CommonSchemas.User(), schemaRegistry);
userCollection.rhizomeConnect(node);
await userCollection.put('alice', {
name: 'Alice',
email: 'alice@test.com'
});
await userCollection.put('bob', {
name: 'Bob',
email: 'bob@test.com'
});
// Create friendship
const friendship = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'alice', targetContext: 'friends' },
{ localContext: 'friends', target: 'bob' }
]
});
node.lossless.ingestDelta(friendship);
const aliceViews = node.lossless.view(['alice']);
const aliceView = aliceViews['alice'];
const nestedView = schemaRegistry.applySchemaWithNesting(
aliceView,
'user',
node.lossless,
{ maxDepth: 3 }
);
if (nestedView.nestedObjects.friends) {
const bobSummary = nestedView.nestedObjects.friends[0];
// Bob should be resolved with user-summary schema
expect(bobSummary.schemaId).toBe('user-summary');
// user-summary schema should have limited properties (only name and email)
expect(bobSummary.properties.name).toBeDefined();
expect(bobSummary.properties.email).toBeDefined();
// user-summary should NOT have friends property to break recursion
expect(bobSummary.properties.friends).toBeUndefined();
expect(bobSummary.nestedObjects.friends).toBeUndefined();
}
});
});
});

View File

@ -1,4 +1,4 @@
import {parseAddressList, PeerAddress} from '../src/peers';
import {parseAddressList, PeerAddress} from '../src/network/peers';
describe('PeerAddress', () => {
it('toString()', () => {

View File

@ -1,4 +1,345 @@
describe.skip('Query', () => {
it('can use a json logic expression to filter the queries', () => {});
it('can use a json logic expression to implement a lossy resolver', () => {});
import { QueryEngine } from '../src/query';
import { Lossless } from '../src/views';
import { DefaultSchemaRegistry } from '../src/schema';
import { CommonSchemas, SchemaBuilder, PrimitiveSchemas } from '../src/schema';
import { Delta } from '../src/core';
import { RhizomeNode } from '../src/node';
describe('Query Engine', () => {
let queryEngine: QueryEngine;
let lossless: Lossless;
let schemaRegistry: DefaultSchemaRegistry;
let rhizomeNode: RhizomeNode;
beforeEach(async () => {
rhizomeNode = new RhizomeNode({
peerId: 'test-query-node',
publishBindPort: 4002,
requestBindPort: 4003
});
lossless = rhizomeNode.lossless;
schemaRegistry = new DefaultSchemaRegistry();
queryEngine = new QueryEngine(lossless, schemaRegistry);
// Register test schemas
schemaRegistry.register(CommonSchemas.User());
schemaRegistry.register(CommonSchemas.UserSummary());
// Create a custom test schema
const blogPostSchema = SchemaBuilder
.create('blog-post')
.name('Blog Post')
.property('title', PrimitiveSchemas.requiredString())
.property('content', PrimitiveSchemas.string())
.property('author', PrimitiveSchemas.requiredString())
.property('published', PrimitiveSchemas.boolean())
.property('views', PrimitiveSchemas.number())
.required('title', 'author')
.build();
schemaRegistry.register(blogPostSchema);
});
afterEach(async () => {
// No cleanup needed for now
});
async function createUser(id: string, name: string, age?: number, email?: string) {
// Create user entity with name
const nameDelta = new Delta({
id: `delta-${id}-name-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'user', target: id, targetContext: 'name' },
{ localContext: 'value', target: name }
]
});
lossless.ingestDelta(nameDelta);
// Add age if provided
if (age !== undefined) {
const ageDelta = new Delta({
id: `delta-${id}-age-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'user', target: id, targetContext: 'age' },
{ localContext: 'value', target: age }
]
});
lossless.ingestDelta(ageDelta);
}
// Add email if provided
if (email) {
const emailDelta = new Delta({
id: `delta-${id}-email-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'user', target: id, targetContext: 'email' },
{ localContext: 'value', target: email }
]
});
lossless.ingestDelta(emailDelta);
}
}
async function createBlogPost(id: string, title: string, author: string, published = false, views = 0) {
// Title delta
const titleDelta = new Delta({
id: `delta-${id}-title-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'post', target: id, targetContext: 'title' },
{ localContext: 'value', target: title }
]
});
lossless.ingestDelta(titleDelta);
// Author delta
const authorDelta = new Delta({
id: `delta-${id}-author-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'post', target: id, targetContext: 'author' },
{ localContext: 'value', target: author }
]
});
lossless.ingestDelta(authorDelta);
// Published delta
const publishedDelta = new Delta({
id: `delta-${id}-published-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'post', target: id, targetContext: 'published' },
{ localContext: 'value', target: published }
]
});
lossless.ingestDelta(publishedDelta);
// Views delta
const viewsDelta = new Delta({
id: `delta-${id}-views-${Date.now()}`,
creator: 'test',
host: 'test-host',
timeCreated: Date.now(),
pointers: [
{ localContext: 'post', target: id, targetContext: 'views' },
{ localContext: 'value', target: views }
]
});
lossless.ingestDelta(viewsDelta);
}
describe('Basic Query Operations', () => {
it('can query all entities of a schema type', async () => {
// Create test users
await createUser('user1', 'Alice', 25, 'alice@example.com');
await createUser('user2', 'Bob', 30);
await createUser('user3', 'Charlie', 35, 'charlie@example.com');
const result = await queryEngine.query('user');
expect(result.totalFound).toBe(3);
expect(result.limited).toBe(false);
expect(Object.keys(result.entities)).toHaveLength(3);
expect(result.entities['user1']).toBeDefined();
expect(result.entities['user2']).toBeDefined();
expect(result.entities['user3']).toBeDefined();
});
it('can query a single entity by ID', async () => {
await createUser('user1', 'Alice', 25, 'alice@example.com');
const result = await queryEngine.queryOne('user', 'user1');
expect(result).toBeDefined();
expect(result?.id).toBe('user1');
expect(result?.propertyDeltas.name).toBeDefined();
expect(result?.propertyDeltas.age).toBeDefined();
expect(result?.propertyDeltas.email).toBeDefined();
});
it('returns null for non-existent entity', async () => {
const result = await queryEngine.queryOne('user', 'nonexistent');
expect(result).toBeNull();
});
});
describe('JSON Logic Filtering', () => {
beforeEach(async () => {
// Create test data
await createUser('user1', 'Alice', 25, 'alice@example.com');
await createUser('user2', 'Bob', 30, 'bob@example.com');
await createUser('user3', 'Charlie', 35, 'charlie@example.com');
await createUser('user4', 'Diana', 20);
});
it('can filter by primitive property values', async () => {
// Find users older than 28
const result = await queryEngine.query('user', {
'>': [{ 'var': 'age' }, 28]
});
expect(result.totalFound).toBe(2);
expect(result.entities['user2']).toBeDefined(); // Bob, 30
expect(result.entities['user3']).toBeDefined(); // Charlie, 35
expect(result.entities['user1']).toBeUndefined(); // Alice, 25
expect(result.entities['user4']).toBeUndefined(); // Diana, 20
});
it('can filter by string properties', async () => {
// Find users whose name contains 'A' (JSON Logic 'in' does a substring check; there is no startsWith operator)
const result = await queryEngine.query('user', {
'in': ['A', { 'var': 'name' }]
});
expect(result.totalFound).toBe(1);
expect(result.entities['user1']).toBeDefined(); // Alice
});
it('can filter by null/missing properties', async () => {
// Find users without email
const result = await queryEngine.query('user', {
'==': [{ 'var': 'email' }, null]
});
expect(result.totalFound).toBe(1);
expect(result.entities['user4']).toBeDefined(); // Diana has no email
});
it('can use complex logic expressions', async () => {
// Find users who are (older than 30) OR (younger than 25 AND have email)
const result = await queryEngine.query('user', {
'or': [
{ '>': [{ 'var': 'age' }, 30] },
{
'and': [
{ '<': [{ 'var': 'age' }, 25] },
{ '!=': [{ 'var': 'email' }, null] }
]
}
]
});
expect(result.totalFound).toBe(1);
expect(result.entities['user3']).toBeDefined(); // Charlie, 35 (older than 30)
// Diana is younger than 25 but has no email
// Alice is 25, not younger than 25
});
});
describe('Blog Post Queries', () => {
beforeEach(async () => {
await createBlogPost('post1', 'Introduction to Rhizome', 'alice', true, 150);
await createBlogPost('post2', 'Advanced Queries', 'bob', true, 75);
await createBlogPost('post3', 'Draft Post', 'alice', false, 0);
await createBlogPost('post4', 'Popular Post', 'charlie', true, 1000);
});
it('can filter published posts', async () => {
const result = await queryEngine.query('blog-post', {
'==': [{ 'var': 'published' }, true]
});
expect(result.totalFound).toBe(3);
expect(result.entities['post1']).toBeDefined();
expect(result.entities['post2']).toBeDefined();
expect(result.entities['post4']).toBeDefined();
expect(result.entities['post3']).toBeUndefined(); // Draft
});
it('can filter by author', async () => {
const result = await queryEngine.query('blog-post', {
'==': [{ 'var': 'author' }, 'alice']
});
expect(result.totalFound).toBe(2);
expect(result.entities['post1']).toBeDefined();
expect(result.entities['post3']).toBeDefined();
});
it('can filter by view count ranges', async () => {
// Posts with more than 100 views
const result = await queryEngine.query('blog-post', {
'>': [{ 'var': 'views' }, 100]
});
expect(result.totalFound).toBe(2);
expect(result.entities['post1']).toBeDefined(); // 150 views
expect(result.entities['post4']).toBeDefined(); // 1000 views
});
});
describe('Query Options', () => {
beforeEach(async () => {
for (let i = 1; i <= 10; i++) {
await createUser(`user${i}`, `User${i}`, 20 + i);
}
});
it('can limit query results', async () => {
const result = await queryEngine.query('user', undefined, { maxResults: 5 });
expect(result.totalFound).toBe(10);
expect(result.limited).toBe(true);
expect(Object.keys(result.entities)).toHaveLength(5);
});
it('respects delta filters', async () => {
const result = await queryEngine.query('user', undefined, {
deltaFilter: (delta) => delta.creator === 'test'
});
expect(result.totalFound).toBe(10);
expect(result.limited).toBe(false);
});
});
describe('Statistics', () => {
it('provides query engine statistics', async () => {
await createUser('user1', 'Alice', 25);
await createBlogPost('post1', 'Test Post', 'alice', true, 50);
const stats = queryEngine.getStats();
expect(stats.totalEntities).toBe(2);
expect(stats.registeredSchemas).toBeGreaterThan(0);
expect(stats.schemasById['user']).toBe(1);
expect(stats.schemasById['blog-post']).toBe(1);
});
});
describe('Error Handling', () => {
it('handles invalid schema IDs gracefully', async () => {
const result = await queryEngine.query('nonexistent-schema');
expect(result.totalFound).toBe(0);
expect(Object.keys(result.entities)).toHaveLength(0);
});
it('handles malformed JSON Logic expressions', async () => {
await createUser('user1', 'Alice', 25);
const result = await queryEngine.query('user', {
'invalid-operator': [{ 'var': 'age' }, 25]
});
// Should not crash, may return empty results or skip problematic entities
Review

Seems like we should probably return an error to the caller in this case?
expect(result).toBeDefined();
expect(typeof result.totalFound).toBe('number');
});
});
});
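Following up on the review note above: one possible way to surface a malformed JSON Logic expression to the caller, rather than silently returning empty results, is sketched below. This is not the current API; the `FilterEvaluation` shape, the `evaluateFilter` helper, and the assumption that filters are evaluated with json-logic-js are all illustrative.

```typescript
import jsonLogic from 'json-logic-js';

// Sketch only, not the current API: evaluate a JSON Logic filter and report
// failures to the caller instead of silently skipping entities.
interface FilterEvaluation {
  matched: boolean;
  error?: string; // populated when the expression could not be evaluated
}

function evaluateFilter(expression: unknown, entity: Record<string, unknown>): FilterEvaluation {
  try {
    // json-logic-js throws on unrecognized operators such as 'invalid-operator';
    // the cast keeps this sketch independent of the library's type definitions.
    const matched = Boolean(jsonLogic.apply(expression as Parameters<typeof jsonLogic.apply>[0], entity));
    return { matched };
  } catch (e) {
    return { matched: false, error: (e as Error).message };
  }
}
```

With something like this, the query result could carry the error (or the query could reject), and the 'handles malformed JSON Logic expressions' test above could assert on an explicit failure instead of a permissive result.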

View File

@ -1,13 +0,0 @@
describe('Relational', () => {
it.skip('Allows expressing a domain ontology as a relational schema', async () => {});
// Deltas can be filtered at time of view resolution, and
// excluded if they violate schema constraints;
// Ideally the sender minimizes this by locally validating against the constraints.
// For cases where deltas conflict, there can be a resolution process,
// with configurable parameters such as duration, quorum, and so on;
// or a deterministic algorithm can be applied.
it.skip('Can validate a delta against a relational constraint', async () => {});
it.skip('Can validate a delta against a set of relational constraints', async () => {});
});

421
__tests__/schema.ts Normal file
View File

@ -0,0 +1,421 @@
import {
SchemaBuilder,
PrimitiveSchemas,
ReferenceSchemas,
ArraySchemas,
CommonSchemas,
ObjectSchema
} from '../src/schema';
import { DefaultSchemaRegistry } from '../src/schema';
import { TypedCollectionImpl, SchemaValidationError } from '../src/collections';
import { RhizomeNode } from '../src/node';
import { Delta } from '../src/core';
describe('Schema System', () => {
let schemaRegistry: DefaultSchemaRegistry;
let node: RhizomeNode;
beforeEach(() => {
schemaRegistry = new DefaultSchemaRegistry();
node = new RhizomeNode();
});
describe('Schema Builder', () => {
it('should create a basic schema', () => {
const schema = SchemaBuilder
.create('user')
.name('User')
.description('A user entity')
.property('name', PrimitiveSchemas.requiredString())
.property('age', PrimitiveSchemas.number())
.property('active', PrimitiveSchemas.boolean())
.required('name')
.build();
expect(schema.id).toBe('user');
expect(schema.name).toBe('User');
expect(schema.description).toBe('A user entity');
expect(schema.properties.name).toEqual({
type: 'primitive',
primitiveType: 'string',
required: true
});
expect(schema.requiredProperties).toContain('name');
});
it('should create schema with references', () => {
const schema = SchemaBuilder
.create('post')
.name('Post')
.property('title', PrimitiveSchemas.requiredString())
.property('author', ReferenceSchemas.required('user'))
.property('tags', ArraySchemas.of(PrimitiveSchemas.string()))
.build();
expect(schema.properties.author).toEqual({
type: 'reference',
targetSchema: 'user',
maxDepth: 3,
required: true
});
expect(schema.properties.tags).toEqual({
type: 'array',
itemSchema: { type: 'primitive', primitiveType: 'string' }
});
});
it('should enforce required fields', () => {
expect(() => {
SchemaBuilder.create('').build();
}).toThrow('Schema must have id and name');
expect(() => {
SchemaBuilder.create('test').build();
}).toThrow('Schema must have id and name');
});
});
describe('Schema Registry', () => {
it('should register and retrieve schemas', () => {
const schema = CommonSchemas.User();
schemaRegistry.register(schema);
const retrieved = schemaRegistry.get('user');
expect(retrieved).toEqual(schema);
const all = schemaRegistry.list();
expect(all).toContain(schema);
});
it('should validate schema structure', () => {
const invalidSchema = {
id: 'invalid',
name: 'Invalid',
properties: {
badProperty: { type: 'unknown' } as never
}
};
expect(() => {
schemaRegistry.register(invalidSchema as ObjectSchema);
}).toThrow('Unknown schema type');
});
it('should validate required properties exist', () => {
const schema = SchemaBuilder
.create('test')
.name('Test')
.property('name', PrimitiveSchemas.string())
.required('name', 'nonexistent')
.build();
expect(() => {
schemaRegistry.register(schema);
}).toThrow("Required property 'nonexistent' not found");
});
it('should detect circular dependencies', () => {
// Create schemas with circular references
const userSchema = SchemaBuilder
.create('user')
.name('User')
.property('name', PrimitiveSchemas.string())
.property('bestFriend', ReferenceSchemas.to('user'))
.build();
schemaRegistry.register(userSchema);
// This is circular (self-reference)
expect(schemaRegistry.hasCircularDependencies()).toBe(true);
// Create actual circular dependency
const groupSchema = SchemaBuilder
.create('group')
.name('Group')
.property('owner', ReferenceSchemas.to('user'))
.build();
const userWithGroupSchema = SchemaBuilder
.create('user-with-group')
.name('User With Group')
.property('group', ReferenceSchemas.to('group'))
.build();
schemaRegistry.register(groupSchema);
schemaRegistry.register(userWithGroupSchema);
// Still circular due to the self-referencing user schema
expect(schemaRegistry.hasCircularDependencies()).toBe(true);
});
it('should validate lossless views against schemas', () => {
const userSchema = CommonSchemas.User();
schemaRegistry.register(userSchema);
// Create a valid lossless view
const validView = {
id: 'user123',
referencedAs: ['user'],
propertyDeltas: {
name: [{
id: 'delta1',
timeCreated: 123,
host: 'host1',
creator: 'creator1',
pointers: [{ name: 'Alice' }]
}],
age: [{
id: 'delta2',
timeCreated: 124,
host: 'host1',
creator: 'creator1',
pointers: [{ age: 25 }]
}]
}
};
const result = schemaRegistry.validate('user123', 'user', validView);
expect(result.valid).toBe(true);
expect(result.errors).toHaveLength(0);
// Test invalid view (missing required property)
const invalidView = {
id: 'user456',
referencedAs: ['user'],
propertyDeltas: {
age: [{
id: 'delta3',
timeCreated: 125,
host: 'host1',
creator: 'creator1',
pointers: [{ age: 30 }]
}]
}
};
const invalidResult = schemaRegistry.validate('user456', 'user', invalidView);
expect(invalidResult.valid).toBe(false);
expect(invalidResult.errors).toContainEqual(
expect.objectContaining({
property: 'name',
message: expect.stringContaining('Required property')
})
);
});
it('should validate primitive types', () => {
const schema = SchemaBuilder
.create('test')
.name('Test')
.property('stringProp', PrimitiveSchemas.string())
.property('numberProp', PrimitiveSchemas.number())
.property('booleanProp', PrimitiveSchemas.boolean())
.build();
schemaRegistry.register(schema);
// Valid types
const validView = {
id: 'test1',
referencedAs: [],
propertyDeltas: {
stringProp: [{ id: 'd1', timeCreated: 1, host: 'h', creator: 'c', pointers: [{ stringProp: 'hello' }] }],
numberProp: [{ id: 'd2', timeCreated: 1, host: 'h', creator: 'c', pointers: [{ numberProp: 42 }] }],
booleanProp: [{ id: 'd3', timeCreated: 1, host: 'h', creator: 'c', pointers: [{ booleanProp: true }] }]
}
};
const validResult = schemaRegistry.validate('test1', 'test', validView);
expect(validResult.valid).toBe(true);
// Invalid types
const invalidView = {
id: 'test2',
referencedAs: [],
propertyDeltas: {
stringProp: [{ id: 'd4', timeCreated: 1, host: 'h', creator: 'c', pointers: [{ stringProp: 123 as never }] }],
numberProp: [{ id: 'd5', timeCreated: 1, host: 'h', creator: 'c', pointers: [{ numberProp: 'not-number' as never }] }]
}
};
const invalidResult = schemaRegistry.validate('test2', 'test', invalidView);
expect(invalidResult.valid).toBe(false);
expect(invalidResult.errors).toHaveLength(2);
});
});
describe('Typed Collection', () => {
it('should create typed collection with schema validation', () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
email?: string;
age?: number;
active?: boolean;
}>('users', userSchema, schemaRegistry);
expect(collection.schema).toEqual(userSchema);
expect(collection.name).toBe('users');
});
it('should validate entities against schema', () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
email?: string;
age?: number;
}>('users', userSchema, schemaRegistry);
// Valid entity
const validUser = { name: 'Alice', email: 'alice@test.com', age: 25 };
const validResult = collection.validate(validUser);
expect(validResult.valid).toBe(true);
// Invalid entity (missing required name)
const invalidUser = { email: 'bob@test.com', age: 30 } as never;
const invalidResult = collection.validate(invalidUser);
expect(invalidResult.valid).toBe(false);
});
it('should enforce strict validation on put operations', async () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
email?: string;
}>('users', userSchema, schemaRegistry, { strictValidation: true });
collection.rhizomeConnect(node);
// Valid put should succeed
await expect(collection.put('user1', { name: 'Alice' })).resolves.toBeDefined();
// Invalid put should fail
await expect(collection.put('user2', { email: 'invalid@test.com' })).rejects.toThrow(SchemaValidationError);
});
it('should provide validation statistics', async () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
email?: string;
}>('users', userSchema, schemaRegistry);
collection.rhizomeConnect(node);
// Add some entities
await collection.put('user1', { name: 'Alice', email: 'alice@test.com' });
await collection.put('user2', { name: 'Bob' });
// Manually create an invalid entity by bypassing validation
const invalidDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'user3', targetContext: 'email' },
{ localContext: 'email', target: 'invalid@test.com' }
]
});
node.lossless.ingestDelta(invalidDelta);
const stats = collection.getValidationStats();
expect(stats.totalEntities).toBe(3);
expect(stats.validEntities).toBe(2);
expect(stats.invalidEntities).toBe(1);
});
it('should filter valid and invalid entities', async () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
email?: string;
}>('users', userSchema, schemaRegistry);
collection.rhizomeConnect(node);
await collection.put('user1', { name: 'Alice' });
await collection.put('user2', { name: 'Bob' });
// Create invalid entity manually
const invalidDelta = new Delta({
creator: node.config.creator,
host: node.config.peerId,
pointers: [
{ localContext: 'users', target: 'user3', targetContext: 'age' },
{ localContext: 'age', target: 'not-a-number' }
]
});
node.lossless.ingestDelta(invalidDelta);
const validIds = collection.getValidEntities();
expect(validIds).toContain('user1');
expect(validIds).toContain('user2');
expect(validIds).not.toContain('user3');
const invalidEntities = collection.getInvalidEntities();
expect(invalidEntities).toHaveLength(1);
expect(invalidEntities[0].entityId).toBe('user3');
});
it('should apply schema to lossless views', async () => {
const userSchema = CommonSchemas.User();
const collection = new TypedCollectionImpl<{
name: string;
age?: number;
}>('users', userSchema, schemaRegistry);
collection.rhizomeConnect(node);
await collection.put('user1', { name: 'Alice', age: 25 });
const validatedView = collection.getValidatedView('user1');
expect(validatedView).toBeDefined();
expect(validatedView!.schemaId).toBe('user');
expect(validatedView!.properties.name).toBeDefined();
expect(validatedView!.properties.age).toBeDefined();
expect(validatedView!.metadata?.appliedAt).toBeDefined();
});
it('should provide schema introspection', () => {
const userSchema = CommonSchemas.User();
schemaRegistry.register(CommonSchemas.UserSummary());
const collection = new TypedCollectionImpl<{
name: string;
}>('users', userSchema, schemaRegistry);
const schemaInfo = collection.getSchemaInfo();
expect(schemaInfo.schema).toEqual(userSchema);
expect(schemaInfo.dependencies).toContain('user-summary');
expect(schemaInfo.hasCircularDependencies).toBe(false);
});
});
describe('Common Schemas', () => {
it('should provide working User schema', () => {
const userSchema = CommonSchemas.User();
expect(userSchema.id).toBe('user');
expect(userSchema.name).toBe('User');
expect(userSchema.properties.name).toBeDefined();
expect(userSchema.properties.friends).toBeDefined();
expect(userSchema.requiredProperties).toContain('name');
});
it('should provide working Document schema', () => {
const docSchema = CommonSchemas.Document();
expect(docSchema.id).toBe('document');
expect(docSchema.properties.title).toBeDefined();
expect(docSchema.properties.author).toBeDefined();
expect(docSchema.requiredProperties).toContain('title');
expect(docSchema.requiredProperties).toContain('author');
});
it('should work together in a registry', () => {
schemaRegistry.register(CommonSchemas.User());
schemaRegistry.register(CommonSchemas.UserSummary());
schemaRegistry.register(CommonSchemas.Document());
expect(schemaRegistry.list()).toHaveLength(3);
expect(schemaRegistry.hasCircularDependencies()).toBe(false); // No circular deps in CommonSchemas
});
});
});

259
__tests__/storage.ts Normal file
View File

@ -0,0 +1,259 @@
import { MemoryDeltaStorage, LevelDBDeltaStorage, StorageFactory } from '../src/storage';
import { Delta } from '../src/core';
import { DeltaQueryStorage } from '../src/storage/interface';
describe('Delta Storage', () => {
const testDeltas = [
new Delta({
id: 'delta1',
creator: 'alice',
host: 'host1',
timeCreated: Date.now() - 1000,
pointers: [
{ localContext: 'user', target: 'user1', targetContext: 'name' },
{ localContext: 'value', target: 'Alice' }
]
}),
new Delta({
id: 'delta2',
creator: 'bob',
host: 'host1',
timeCreated: Date.now() - 500,
pointers: [
{ localContext: 'user', target: 'user1', targetContext: 'age' },
{ localContext: 'value', target: 25 }
]
}),
new Delta({
id: 'delta3',
creator: 'alice',
host: 'host2',
timeCreated: Date.now(),
pointers: [
{ localContext: 'user', target: 'user2', targetContext: 'name' },
{ localContext: 'value', target: 'Bob' }
]
})
];
describe('Memory Storage', () => {
let storage: DeltaQueryStorage;
beforeEach(() => {
storage = new MemoryDeltaStorage();
});
afterEach(async () => {
await storage.close();
});
runStorageTests(() => storage as DeltaQueryStorage);
});
describe('LevelDB Storage', () => {
let storage: DeltaQueryStorage;
beforeEach(async () => {
storage = new LevelDBDeltaStorage('./test-data/leveldb-test');
await (storage as LevelDBDeltaStorage).open();
await (storage as LevelDBDeltaStorage).clearAll();
});
afterEach(async () => {
await storage.close();
});
runStorageTests(() => storage);
});
describe('Storage Factory', () => {
it('creates memory storage', () => {
const storage = StorageFactory.create({ type: 'memory' });
expect(storage).toBeInstanceOf(MemoryDeltaStorage);
});
it('creates LevelDB storage', () => {
const storage = StorageFactory.create({
type: 'leveldb',
path: './test-data/factory-test'
});
expect(storage).toBeInstanceOf(LevelDBDeltaStorage);
});
it('throws on unknown storage type', () => {
expect(() => {
StorageFactory.create({ type: 'unknown' as 'memory' | 'leveldb' });
}).toThrow('Unknown storage type: unknown');
});
});
function runStorageTests(getStorage: () => DeltaQueryStorage) {
it('stores and retrieves deltas', async () => {
const storage = getStorage();
// Store deltas
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
// Retrieve individual deltas
const delta1 = await storage.getDelta('delta1');
expect(delta1).toBeDefined();
expect(delta1!.id).toBe('delta1');
expect(delta1!.creator).toBe('alice');
// Test non-existent delta
const nonExistent = await storage.getDelta('nonexistent');
expect(nonExistent).toBeNull();
});
it('gets all deltas', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
const allDeltas = await storage.getAllDeltas();
expect(allDeltas).toHaveLength(3);
const deltaIds = allDeltas.map(d => d.id);
expect(deltaIds).toContain('delta1');
expect(deltaIds).toContain('delta2');
expect(deltaIds).toContain('delta3');
});
it('filters deltas', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
// Filter by creator
const aliceDeltas = await storage.getAllDeltas(d => d.creator === 'alice');
expect(aliceDeltas).toHaveLength(2);
expect(aliceDeltas.every(d => d.creator === 'alice')).toBe(true);
});
it('gets deltas for entity', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
const user1Deltas = await storage.getDeltasForEntity('user1');
expect(user1Deltas).toHaveLength(2);
const user2Deltas = await storage.getDeltasForEntity('user2');
expect(user2Deltas).toHaveLength(1);
const nonExistentDeltas = await storage.getDeltasForEntity('user999');
expect(nonExistentDeltas).toHaveLength(0);
});
it('gets deltas by context', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
const nameDeltas = await storage.getDeltasByContext('user1', 'name');
expect(nameDeltas).toHaveLength(1);
expect(nameDeltas[0].id).toBe('delta1');
const ageDeltas = await storage.getDeltasByContext('user1', 'age');
expect(ageDeltas).toHaveLength(1);
expect(ageDeltas[0].id).toBe('delta2');
const nonExistentDeltas = await storage.getDeltasByContext('user1', 'email');
expect(nonExistentDeltas).toHaveLength(0);
});
it('queries deltas with complex criteria', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
// Query by creator
const aliceDeltas = await storage.queryDeltas({ creator: 'alice' });
expect(aliceDeltas).toHaveLength(2);
// Query by host
const host1Deltas = await storage.queryDeltas({ host: 'host1' });
expect(host1Deltas).toHaveLength(2);
// Query by entity
const user1Deltas = await storage.queryDeltas({ targetEntities: ['user1'] });
expect(user1Deltas).toHaveLength(2);
// Query by context
const nameDeltas = await storage.queryDeltas({ contexts: ['name'] });
expect(nameDeltas).toHaveLength(2);
// Combined query
const aliceUser1Deltas = await storage.queryDeltas({
creator: 'alice',
targetEntities: ['user1']
});
expect(aliceUser1Deltas).toHaveLength(1);
expect(aliceUser1Deltas[0].id).toBe('delta1');
});
it('applies pagination to queries', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
// Test limit
const limitedDeltas = await storage.queryDeltas({ limit: 2 });
expect(limitedDeltas).toHaveLength(2);
// Test offset
const offsetDeltas = await storage.queryDeltas({ offset: 1 });
expect(offsetDeltas).toHaveLength(2);
// Test limit + offset
const pagedDeltas = await storage.queryDeltas({ offset: 1, limit: 1 });
expect(pagedDeltas).toHaveLength(1);
});
it('counts deltas', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
const totalCount = await storage.countDeltas({});
expect(totalCount).toBe(3);
const aliceCount = await storage.countDeltas({ creator: 'alice' });
expect(aliceCount).toBe(2);
const user1Count = await storage.countDeltas({ targetEntities: ['user1'] });
expect(user1Count).toBe(2);
});
it('provides storage statistics', async () => {
const storage = getStorage();
for (const delta of testDeltas) {
await storage.storeDelta(delta);
}
const stats = await storage.getStats();
expect(stats.totalDeltas).toBe(3);
expect(stats.totalEntities).toBe(2); // user1 and user2
expect(stats.oldestDelta).toBeDefined();
expect(stats.newestDelta).toBeDefined();
expect(stats.oldestDelta! <= stats.newestDelta!).toBe(true);
});
}
});

View File

@ -0,0 +1,458 @@
import {
RhizomeNode,
Lossless,
Delta,
TimestampResolver,
CreatorIdTimestampResolver,
DeltaIdTimestampResolver,
HostIdTimestampResolver,
LexicographicTimestampResolver
} from "../src";
describe('Timestamp Resolvers', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Basic Timestamp Resolution', () => {
test('should resolve by most recent timestamp', () => {
// Add older delta
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
// Add newer delta
lossless.ingestDelta(new Delta({
creator: 'user2',
host: 'host2',
id: 'delta2',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new TimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.score).toBe(20); // More recent value wins
});
test('should handle multiple entities with different timestamps', () => {
// Entity1 - older value
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 100
}]
}));
// Entity2 - newer value
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
timeCreated: 2000,
pointers: [{
localContext: "collection",
target: "entity2",
targetContext: "value"
}, {
localContext: "value",
target: 200
}]
}));
const resolver = new TimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(100);
expect(result!['entity2'].properties.value).toBe(200);
});
});
describe('Tie-Breaking Strategies', () => {
test('should break ties using creator-id strategy', () => {
// Two deltas with same timestamp, different creators
lossless.ingestDelta(new Delta({
creator: 'user_z', // Lexicographically later
host: 'host1',
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user_a', // Lexicographically earlier
host: 'host1',
id: 'delta2',
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new CreatorIdTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// user_z comes later lexicographically, so should win
expect(result!['entity1'].properties.score).toBe(10);
});
test('should break ties using delta-id strategy', () => {
// Two deltas with same timestamp, different delta IDs
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta_a', // Lexicographically earlier
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta_z', // Lexicographically later
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new DeltaIdTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// delta_z comes later lexicographically, so should win
expect(result!['entity1'].properties.score).toBe(20);
});
test('should break ties using host-id strategy', () => {
// Two deltas with same timestamp, different hosts
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host_z', // Lexicographically later
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host_a', // Lexicographically earlier
id: 'delta2',
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new HostIdTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// host_z comes later lexicographically, so should win
expect(result!['entity1'].properties.score).toBe(10);
});
test('should break ties using lexicographic strategy with string values', () => {
// Two deltas with same timestamp, different string values
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'alice'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta2',
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'bob'
}]
}));
const resolver = new LexicographicTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// 'bob' comes later lexicographically than 'alice', so should win
expect(result!['entity1'].properties.name).toBe('bob');
});
test('should break ties using lexicographic strategy with numeric values (falls back to delta ID)', () => {
// Two deltas with same timestamp, numeric values (should fall back to delta ID comparison)
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta_a', // Lexicographically earlier
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta_z', // Lexicographically later
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 200
}]
}));
const resolver = new LexicographicTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// Should fall back to delta ID comparison: delta_z > delta_a
expect(result!['entity1'].properties.score).toBe(200);
});
});
describe('Complex Tie-Breaking Scenarios', () => {
test('should handle multiple properties with different tie-breaking outcomes', () => {
// Add deltas for multiple properties with same timestamp
lossless.ingestDelta(new Delta({
creator: 'user_a',
host: 'host1',
id: 'delta_z',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'alice'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user_z',
host: 'host1',
id: 'delta_a',
timeCreated: 1000, // Same timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'bob'
}]
}));
const creatorResolver = new CreatorIdTimestampResolver(lossless);
const deltaResolver = new DeltaIdTimestampResolver(lossless);
const creatorResult = creatorResolver.resolve();
const deltaResult = deltaResolver.resolve();
expect(creatorResult).toBeDefined();
expect(deltaResult).toBeDefined();
// Creator strategy: user_z > user_a, so 'bob' wins
expect(creatorResult!['entity1'].properties.name).toBe('bob');
// Delta ID strategy: delta_z > delta_a, so 'alice' wins
expect(deltaResult!['entity1'].properties.name).toBe('alice');
});
test('should work consistently with timestamp priority over tie-breaking', () => {
// Add older delta with "better" tie-breaking attributes
lossless.ingestDelta(new Delta({
creator: 'user_z', // Would win in creator tie-breaking
host: 'host1',
id: 'delta_z', // Would win in delta ID tie-breaking
timeCreated: 1000, // Older timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 10
}]
}));
// Add newer delta with "worse" tie-breaking attributes
lossless.ingestDelta(new Delta({
creator: 'user_a', // Would lose in creator tie-breaking
host: 'host1',
id: 'delta_a', // Would lose in delta ID tie-breaking
timeCreated: 2000, // Newer timestamp
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 20
}]
}));
const resolver = new CreatorIdTimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
// Timestamp should take priority over tie-breaking, so newer value (20) wins
expect(result!['entity1'].properties.score).toBe(20);
});
});
describe('Edge Cases', () => {
test('should handle single delta correctly', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "value"
}, {
localContext: "value",
target: 42
}]
}));
const resolver = new TimestampResolver(lossless, 'creator-id');
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.value).toBe(42);
});
test('should handle mixed value types correctly', () => {
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta1',
timeCreated: 1000,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "name"
}, {
localContext: "name",
target: 'test'
}]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
id: 'delta2',
timeCreated: 1001,
pointers: [{
localContext: "collection",
target: "entity1",
targetContext: "score"
}, {
localContext: "score",
target: 100
}]
}));
const resolver = new TimestampResolver(lossless);
const result = resolver.resolve();
expect(result).toBeDefined();
expect(result!['entity1'].properties.name).toBe('test');
expect(result!['entity1'].properties.score).toBe(100);
});
});
});

501
__tests__/transactions.ts Normal file
View File

@ -0,0 +1,501 @@
import * as _RhizomeImports from "../src";
Review

This line appears to do nothing
import { Delta } from '../src/core';
import { Lossless } from '../src/views';
import { RhizomeNode } from '../src/node';
import { DeltaFilter } from '../src/core';
describe('Transactions', () => {
let node: RhizomeNode;
let lossless: Lossless;
beforeEach(() => {
node = new RhizomeNode();
lossless = new Lossless(node);
});
describe('Transaction-based filtering', () => {
it('should exclude deltas from incomplete transactions', () => {
const transactionId = 'tx-123';
// Create a delta that declares a transaction with size 3
const txDeclaration = new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 3 }
]
});
// Create first delta in transaction
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'name', target: 'user123', targetContext: 'name' },
{ localContext: 'value', target: 'Alice' }
]
});
// Create second delta in transaction
const delta2 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'age', target: 'user123', targetContext: 'age' },
{ localContext: 'value', target: 25 }
]
});
// Ingest transaction declaration and first two deltas
lossless.ingestDelta(txDeclaration);
lossless.ingestDelta(delta1);
lossless.ingestDelta(delta2);
// View should be empty because transaction is incomplete (2/3 deltas)
const view = lossless.view(['user123']);
expect(view.user123).toBeUndefined();
// Add the third delta to complete the transaction
const delta3 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'email', target: 'user123', targetContext: 'email' },
{ localContext: 'value', target: 'alice@example.com' }
]
});
lossless.ingestDelta(delta3);
// Now the view should include all deltas from the completed transaction
const completeView = lossless.view(['user123']);
expect(completeView.user123).toBeDefined();
expect(completeView.user123.propertyDeltas.name).toHaveLength(1);
expect(completeView.user123.propertyDeltas.age).toHaveLength(1);
expect(completeView.user123.propertyDeltas.email).toHaveLength(1);
});
it('should handle multiple transactions independently', () => {
const tx1 = 'tx-001';
const tx2 = 'tx-002';
// Declare two transactions
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: tx1, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: tx2, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Add deltas for both transactions
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: tx1, targetContext: 'deltas' },
{ localContext: 'status', target: 'order1', targetContext: 'status' },
{ localContext: 'value', target: 'pending' }
]
}));
lossless.ingestDelta(new Delta({
creator: 'user2',
host: 'host2',
pointers: [
{ localContext: '_transaction', target: tx2, targetContext: 'deltas' },
{ localContext: 'status', target: 'order2', targetContext: 'status' },
{ localContext: 'value', target: 'shipped' }
]
}));
// Neither transaction is complete
let view = lossless.view(['order1', 'order2']);
expect(view.order1).toBeUndefined();
expect(view.order2).toBeUndefined();
// Complete tx1
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: tx1, targetContext: 'deltas' },
{ localContext: 'total', target: 'order1', targetContext: 'total' },
{ localContext: 'value', target: 100 }
]
}));
// tx1 is complete, tx2 is not
view = lossless.view(['order1', 'order2']);
expect(view.order1).toBeDefined();
expect(view.order1.propertyDeltas.status).toHaveLength(1);
expect(view.order1.propertyDeltas.total).toHaveLength(1);
expect(view.order2).toBeUndefined();
// Complete tx2
lossless.ingestDelta(new Delta({
creator: 'user2',
host: 'host2',
pointers: [
{ localContext: '_transaction', target: tx2, targetContext: 'deltas' },
{ localContext: 'tracking', target: 'order2', targetContext: 'tracking' },
{ localContext: 'value', target: 'TRACK123' }
]
}));
// Both transactions complete
view = lossless.view(['order1', 'order2']);
expect(view.order1).toBeDefined();
expect(view.order2).toBeDefined();
expect(view.order2.propertyDeltas.status).toHaveLength(1);
expect(view.order2.propertyDeltas.tracking).toHaveLength(1);
});
it('should work with transaction-aware delta filters', () => {
const transactionId = 'tx-filter-test';
// Create transaction with 2 deltas
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Add both deltas
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'type', target: 'doc1', targetContext: 'type' },
{ localContext: 'value', target: 'report' }
]
}));
lossless.ingestDelta(new Delta({
creator: 'user2',
host: 'host2',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'author', target: 'doc1', targetContext: 'author' },
{ localContext: 'value', target: 'Bob' }
]
}));
// Create a filter that only accepts deltas from user1
const userFilter: DeltaFilter = (delta) => delta.creator === 'user1';
// The transaction is already complete at this point; the creator filter still applies on top
const view = lossless.view(['doc1'], userFilter);
// Even though transaction is complete, only delta from user1 should appear
expect(view.doc1).toBeDefined();
expect(view.doc1.propertyDeltas.type).toHaveLength(1);
expect(view.doc1.propertyDeltas.author).toBeUndefined();
});
it('should handle transaction with deltas affecting multiple entities', () => {
const transactionId = 'tx-multi-entity';
// Transaction that updates multiple entities atomically
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 3 }
]
}));
// Transfer money from account1 to account2
lossless.ingestDelta(new Delta({
creator: 'bank',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'balance', target: 'account1', targetContext: 'balance' },
{ localContext: 'value', target: 900 },
{ localContext: 'operation', target: 'debit' }
]
}));
lossless.ingestDelta(new Delta({
creator: 'bank',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'balance', target: 'account2', targetContext: 'balance' },
{ localContext: 'value', target: 1100 },
{ localContext: 'operation', target: 'credit' }
]
}));
// Transaction incomplete - no entities should show updates
let view = lossless.view(['account1', 'account2']);
expect(view.account1).toBeUndefined();
expect(view.account2).toBeUndefined();
// Complete transaction with audit log
lossless.ingestDelta(new Delta({
creator: 'bank',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'transfer', target: 'transfer123', targetContext: 'details' },
{ localContext: 'from', target: 'account1' },
{ localContext: 'to', target: 'account2' },
{ localContext: 'amount', target: 100 }
]
}));
// All entities should now be visible
view = lossless.view(['account1', 'account2', 'transfer123']);
expect(view.account1).toBeDefined();
expect(view.account1.propertyDeltas.balance).toHaveLength(1);
expect(view.account2).toBeDefined();
expect(view.account2.propertyDeltas.balance).toHaveLength(1);
expect(view.transfer123).toBeDefined();
expect(view.transfer123.propertyDeltas.details).toHaveLength(1);
});
it('should emit events only when transactions complete', async () => {
const transactionId = 'tx-events';
const updateEvents: Array<{ entityId: string, deltaIds: string[] }> = [];
// Listen for update events
lossless.eventStream.on('updated', (entityId, deltaIds) => {
updateEvents.push({ entityId, deltaIds });
});
// Create transaction
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Add first delta
const delta1 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'field1', target: 'entity1', targetContext: 'field1' },
{ localContext: 'value', target: 'value1' }
]
});
lossless.ingestDelta(delta1);
// No events should be emitted yet
expect(updateEvents).toHaveLength(0);
// Add second delta to complete transaction
const delta2 = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'field2', target: 'entity1', targetContext: 'field2' },
{ localContext: 'value', target: 'value2' }
]
});
lossless.ingestDelta(delta2);
// Wait for async event processing
await new Promise(resolve => setTimeout(resolve, 10));
// Now we should have received update events
// One for the transaction entity itself, and one for entity1
expect(updateEvents).toHaveLength(2);
// Find the entity1 update event
const entity1Update = updateEvents.find(e => e.entityId === 'entity1');
expect(entity1Update).toBeDefined();
expect(entity1Update!.deltaIds).toContain(delta1.id);
expect(entity1Update!.deltaIds).toContain(delta2.id);
});
it('should support waiting for transaction completion', async () => {
const transactionId = 'tx-wait';
// Create transaction
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Add first delta
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'status', target: 'job1', targetContext: 'status' },
{ localContext: 'value', target: 'processing' }
]
}));
// Start waiting for transaction
const waitPromise = lossless.transactions.waitFor(transactionId);
let isResolved = false;
waitPromise.then(() => { isResolved = true; });
// Should not be resolved yet
await new Promise(resolve => setTimeout(resolve, 10));
expect(isResolved).toBe(false);
// Complete transaction
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'status', target: 'job1', targetContext: 'status' },
{ localContext: 'value', target: 'completed' }
]
}));
// Wait should now resolve
await waitPromise;
expect(isResolved).toBe(true);
// View should show completed transaction
const view = lossless.view(['job1']);
expect(view.job1).toBeDefined();
expect(view.job1.propertyDeltas.status).toHaveLength(2);
});
it('should handle non-transactional deltas normally', () => {
// Regular delta without transaction
const regularDelta = new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: 'name', target: 'user456', targetContext: 'name' },
{ localContext: 'value', target: 'Charlie' }
]
});
const updateEvents: string[] = [];
lossless.eventStream.on('updated', (entityId) => {
updateEvents.push(entityId);
});
lossless.ingestDelta(regularDelta);
// Should immediately appear in view
const view = lossless.view(['user456']);
expect(view.user456).toBeDefined();
expect(view.user456.propertyDeltas.name).toHaveLength(1);
// Should immediately emit update event
expect(updateEvents).toContain('user456');
});
});
describe('Transaction edge cases', () => {
it('should handle transaction size updates', () => {
const transactionId = 'tx-resize';
// Initially declare transaction with size 2
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 2 }
]
}));
// Add 2 deltas
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'item1', target: 'cart1', targetContext: 'items' }
]
}));
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'item2', target: 'cart1', targetContext: 'items' }
]
}));
// Transaction should be complete
expect(lossless.transactions.isComplete(transactionId)).toBe(true);
// View should show the cart
const view = lossless.view(['cart1']);
expect(view.cart1).toBeDefined();
});
it('should handle missing transaction size gracefully', () => {
const transactionId = 'tx-no-size';
// Add delta with transaction reference but no size declaration
lossless.ingestDelta(new Delta({
creator: 'user1',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'deltas' },
{ localContext: 'data', target: 'entity1', targetContext: 'data' },
{ localContext: 'value', target: 'test' }
]
}));
// Transaction should not be complete (no size)
expect(lossless.transactions.isComplete(transactionId)).toBe(false);
// Delta should not appear in view
const view = lossless.view(['entity1']);
expect(view.entity1).toBeUndefined();
// Declare size after the fact
lossless.ingestDelta(new Delta({
creator: 'system',
host: 'host1',
pointers: [
{ localContext: '_transaction', target: transactionId, targetContext: 'size' },
{ localContext: 'size', target: 1 }
]
}));
// Now transaction should be complete
expect(lossless.transactions.isComplete(transactionId)).toBe(true);
// And delta should appear in view
const viewAfter = lossless.view(['entity1']);
expect(viewAfter.entity1).toBeDefined();
});
});
});

View File

View File

@ -0,0 +1 @@
MANIFEST-000030
Review

I think `data/` should probably be excluded from the repo by `.gitignore`

View File

0
data/deltas-accepted/LOG Normal file
View File

View File

Binary file not shown.

View File

View File

@ -0,0 +1 @@
MANIFEST-000030

0
data/query-results/LOCK Normal file
View File

0
data/query-results/LOG Normal file
View File

View File

Binary file not shown.

View File

@ -7,6 +7,18 @@ export default tseslint.config(
{
ignores: [
"dist/",
],
]
},
{
rules: {
"@typescript-eslint/no-unused-vars": [
"error",
{
"argsIgnorePattern": "^_",
"varsIgnorePattern": "^_",
"caughtErrorsIgnorePattern": "^_"
}
]
}
}
);

View File

@ -1,7 +1,5 @@
import Debug from 'debug';
import {BasicCollection} from '../src/collection-basic';
import {Entity} from "../src/entity";
import {RhizomeNode} from "../src/node";
import {BasicCollection, Entity, RhizomeNode} from '../src';
const debug = Debug('example-app');
// As an app we want to be able to write and read data.

311
next_steps.md Normal file
View File

@ -0,0 +1,311 @@
# Phase 4: Delta Patterns & Query Traversal - Implementation Plan
## Overview
Phase 4 recognizes that in Rhizome, **deltas ARE relationships**. Instead of adding a relationship layer on top of deltas, we're creating tools to work with delta patterns more effectively. This phase focuses on formalizing common delta patterns, building query conveniences for traversing these patterns, and creating specialized resolvers that interpret deltas as familiar relational concepts.
## Core Insights
1. **Deltas are relationships**: Every delta with pointers already expresses relationships
2. **Patterns, not structure**: We're recognizing patterns in how deltas connect entities
3. **Perspective-driven**: Different views/resolvers can interpret the same deltas differently
4. **No single truth**: Competing deltas are resolved by application-level lossy resolvers
5. **Time-aware**: All queries are inherently temporal, showing different relationships at different times
## Current State ✅
- **All tests passing**: 21/21 suites, 183/183 tests (100%)
- **Delta system**: Fully functional with pointers expressing relationships
- **Negation system**: Can invalidate deltas (and thus relationships)
- **Query system**: Basic traversal of lossless views
- **Schema system**: Can describe entity structures
- **Resolver system**: Application-level interpretation of deltas
## Implementation Plan
### Step 1: Delta Pattern Recognition
**Goal**: Formalize common patterns of deltas that represent familiar relationships
**Tasks**:
1. Create `src/patterns/delta-patterns.ts`:
- Define patterns for common relationship types
- Create pattern matching utilities
- Document pattern conventions
2. Common patterns to recognize:
```typescript
// One-to-one: A delta pointing from A to B with unique constraint
const AuthorshipPattern = {
name: 'authorship',
match: (delta) =>
delta.pointers.some(p => p.targetContext === 'author') &&
delta.pointers.some(p => p.targetContext === 'post'),
interpret: (delta) => ({
post: delta.pointers.find(p => p.targetContext === 'post').target,
author: delta.pointers.find(p => p.targetContext === 'author').target
})
};
// One-to-many: Multiple deltas pointing from many Bs to one A
const PostsByAuthorPattern = {
name: 'posts-by-author',
query: (authorId) => ({
pointers: {
some: {
target: authorId,
targetContext: 'author'
}
}
})
};
```
3. Pattern validation:
- Ensure deltas match expected patterns
- Provide clear feedback when patterns are violated
- Allow flexible pattern definitions
### Step 2: Query Pattern Traversal
**Goal**: Make it easy to traverse delta patterns in queries
**Tasks**:
1. Extend `QueryEngine` with pattern-aware methods:
```typescript
// Find all deltas that establish a certain relationship
queryEngine.findRelationships('authorship', {
author: 'user-123'
});
// Traverse relationships in time
queryEngine.findRelationships('authorship', {
author: 'user-123',
asOf: timestamp // Time-travel query
});
```
2. Create traversal helpers:
```typescript
// Follow a chain of relationships
queryEngine.traverse({
start: 'user-123',
follow: [
{ pattern: 'authorship', direction: 'from' },
{ pattern: 'comments', direction: 'to' }
],
includeNegated: false // Perspective choice
});
```
3. Multi-perspective queries:
```typescript
// Different views of the same deltas
queryEngine.query('Post', {}, {
perspectives: {
published: { includeNegated: false },
draft: { includeNegated: true },
historical: { asOf: timestamp }
}
});
```
### Step 3: Pattern-Aware Resolvers
**Goal**: Create resolvers that interpret delta patterns as familiar concepts
**Tasks**:
1. Create `src/views/resolvers/pattern-resolver.ts`:
```typescript
class PatternResolver {
// Interpret deltas matching certain patterns
resolveWithPatterns(entityId, patterns) {
const deltas = this.lossless.getDeltasForEntity(entityId);
return {
entity: entityId,
relationships: patterns.map(pattern => ({
type: pattern.name,
targets: deltas
.filter(pattern.match)
.map(pattern.interpret)
}))
};
}
}
```
2. Specialized pattern resolvers:
- `ReferenceResolver`: Follows pointer patterns
- `TemporalResolver`: Shows relationships over time
- `CompetingValueResolver`: Handles multiple values for same relationship
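As a rough illustration (not a commitment to this API), a `CompetingValueResolver` could return every asserted value per property rather than picking a single winner, using the collapsed-pointer shape the lossless view exposes in the tests above:
```typescript
// Illustrative sketch of the proposed CompetingValueResolver; class and method
// names are assumptions. Types mirror the lossless view shape used in the tests.
type CollapsedPointer = Record<string, unknown>;
interface LosslessViewLike {
  propertyDeltas: Record<string, Array<{ pointers: CollapsedPointer[] }>>;
}

class CompetingValueResolver {
  resolve(view: LosslessViewLike): Record<string, unknown[]> {
    const competing: Record<string, unknown[]> = {};
    for (const [property, deltas] of Object.entries(view.propertyDeltas)) {
      // Collect the value each delta asserts for this property.
      competing[property] = deltas.flatMap(d =>
        d.pointers.filter(p => property in p).map(p => p[property])
      );
    }
    return competing;
  }
}
```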
3. Resolver composition:
```typescript
// Stack resolvers for different perspectives
const publishedView = new ResolverStack([
new NegationFilter(),
new TemporalResolver({ until: now }),
new LastWriteWins()
]);
```
### Step 4: Delta Pattern Validation
**Goal**: Validate that deltas follow expected patterns (without enforcing)
**Tasks**:
1. Create `src/features/pattern-validation.ts`:
```typescript
// Validate but don't enforce
validateDeltaPattern(delta, pattern) {
const result = pattern.validate(delta);
if (!result.valid) {
// Emit warning, but still accept delta
this.emit('pattern-warning', {
delta,
pattern: pattern.name,
issues: result.issues
});
}
return result;
}
```
2. Pattern constraints as guidance (see the sketch after this list):
- Required pointer contexts
- Expected value types
- Cardinality suggestions
- Temporal constraints
3. Missing information detection:
```typescript
// Detect incomplete patterns
detectMissingRelationships(entity, expectedPatterns) {
return expectedPatterns.filter(pattern =>
!this.hasMatchingDelta(entity, pattern)
);
}
```
### Step 5: Collection Pattern Helpers
**Goal**: Make collections work naturally with delta patterns
**Tasks**:
1. Extend collections with pattern methods:
```typescript
class PatternAwareCollection extends Collection {
// Create deltas that match patterns
relate(from, to, pattern) {
const delta = pattern.createDelta(from, to);
return this.rhizomeNode.acceptDelta(delta);
}
// Query using patterns
findRelated(entity, pattern) {
return this.queryEngine.findRelationships(pattern, {
[pattern.fromContext]: entity
});
}
}
```
2. Pattern-based operations:
- Batch relationship creation
   - Relationship negation helpers (see the sketch after this list)
- Pattern-based cascades
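The negation helper could build directly on the existing `NegationHelper.createNegation`; the wrapper function below is a hypothetical sketch, and only `createNegation` is an existing API:
```typescript
import { NegationHelper, NegationDelta } from '../features/negation';

// Build one negation delta per relationship-establishing delta.
// The caller decides how to publish or ingest the resulting deltas.
function buildRelationshipNegations(
  relationshipDeltaIds: string[],
  creator: string,
  host: string
): NegationDelta[] {
  return relationshipDeltaIds.map(deltaId =>
    NegationHelper.createNegation(deltaId, creator, host)
  );
}
```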
### Step 6: Temporal Pattern Queries
**Goal**: Leverage time-travel for relationship history
**Tasks**:
1. Time-aware pattern queries:
```typescript
// Show relationship changes over time
queryEngine.relationshipHistory('authorship', {
post: 'post-123',
timeRange: { from: t1, to: t2 }
});
// Find when relationships were established/negated
queryEngine.relationshipTimeline(entityId);
```
2. Temporal pattern analysis (a duration sketch follows this list):
- Relationship duration
- Relationship conflicts over time
- Pattern evolution
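Relationship duration, for example, falls out of comparing each establishing delta's timestamp with the timestamp of the negation (if any) that ended it. A minimal sketch with simplified, assumed types:
```typescript
// Illustrative only: one entry per relationship-establishing delta.
interface RelationshipEvent {
  deltaId: string;
  established: number;   // timeCreated of the establishing delta
  negatedAt?: number;    // timeCreated of the negation delta, if negated
}

// How long each relationship was (or has been) in effect, as of `now`.
function relationshipDurations(events: RelationshipEvent[], now: number): Map<string, number> {
  const durations = new Map<string, number>();
  for (const event of events) {
    const end = event.negatedAt ?? now;
    durations.set(event.deltaId, Math.max(0, end - event.established));
  }
  return durations;
}
```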
## File Structure
**New files to create**:
```
src/
├── patterns/
│ ├── delta-patterns.ts # Pattern definitions
│ ├── pattern-matcher.ts # Pattern matching utilities
│ └── pattern-validators.ts # Pattern validation
├── query/
│ └── pattern-query-engine.ts # Pattern-aware queries
├── views/
│ └── resolvers/
│ ├── pattern-resolver.ts # Pattern interpretation
│ └── temporal-resolver.ts # Time-aware resolution
└── features/
└── pattern-validation.ts # Soft validation
```
**Files to modify**:
- `src/query/query-engine.ts` - Add pattern methods
- `src/collections/collection-abstract.ts` - Add pattern helpers
- `src/node.ts` - Wire up pattern features
## Testing Strategy
**New test files**:
- `__tests__/delta-patterns.ts` - Pattern definition and matching
- `__tests__/pattern-queries.ts` - Pattern-based traversal
- `__tests__/pattern-validation.ts` - Soft validation behavior
- `__tests__/temporal-patterns.ts` - Time-travel relationship queries
- `__tests__/competing-relationships.ts` - Multiple relationship handling
**Test scenarios**:
1. Define and match delta patterns
2. Query relationships using patterns
3. Validate deltas against patterns (warnings only)
4. Time-travel through relationship history
5. Handle competing relationship deltas
6. Detect missing relationships
7. Test pattern-based cascading negations
## Success Criteria
- [ ] Delta patterns are well-defined and matchable
- [ ] Queries can traverse relationships via delta patterns
- [ ] Pattern validation provides guidance without enforcement
- [ ] Time-travel queries work with relationships
- [ ] Competing relationships are handled gracefully
- [ ] Missing relationships are detectable
- [ ] Performance scales with pattern complexity
- [ ] Developers find patterns intuitive to use
## Key Principles to Maintain
1. **Deltas are relationships** - Never create a separate relationship system
2. **Patterns are recognition** - We're recognizing what's already there
3. **Perspective matters** - Same deltas, different interpretations
4. **No enforcement** - Validation guides but doesn't restrict
5. **Time is first-class** - All relationships exist in time
6. **Conflicts are natural** - Multiple truths coexist until resolved by views
## Next Session Tasks
1. Define core delta patterns in `delta-patterns.ts`
2. Create pattern matching utilities
3. Extend QueryEngine with pattern-aware methods
4. Write tests for pattern recognition
5. Document the delta-as-relationship philosophy
This approach embraces Rhizome's fundamental architecture where deltas ARE the relationships, making it easier to work with these patterns while respecting the system's perspective-driven, temporal nature.

View File

@ -2,6 +2,7 @@
"name": "rhizome-node",
"version": "0.1.0",
"description": "Rhizomatic database engine node",
"type": "module",
"scripts": {
"build": "tsc",
"build:watch": "tsc --watch",
@ -53,4 +54,4 @@
"typescript": "^5.7.2",
"typescript-eslint": "^8.18.0"
}
}
}

View File

@ -0,0 +1,130 @@
# Spec vs Implementation Test Coverage Report
## Executive Summary
The rhizome-node implementation demonstrates strong alignment with core spec concepts but lacks implementation and testing for several advanced features. The fundamental delta → lossless → lossy transformation pipeline is well-implemented, while query systems, relational features, and advanced conflict resolution remain unimplemented.
## Core Concept Alignment
### ✅ Well-Aligned Concepts
1. **Delta Structure**
- **Spec**: Deltas contain pointers with name/target/context fields
- **Implementation**: Correctly implements both V1 (array) and V2 (object) formats
- **Tests**: Basic format conversion tested, but validation gaps exist
2. **Lossless Views**
- **Spec**: Full inventory of all deltas composing an object
- **Implementation**: `LosslessViewDomain` correctly accumulates deltas by entity/property
- **Tests**: Good coverage of basic transformation, filtering by creator/host
3. **Lossy Views**
- **Spec**: Compression of lossless views using resolution strategies
- **Implementation**: Initializer/reducer/resolver pattern provides flexibility
- **Tests**: Domain-specific example (Role/Actor/Film) demonstrates concept
4. **Basic Conflict Resolution**
- **Spec**: Resolution strategies for collapsing delta sets
- **Implementation**: Last-Write-Wins resolver implemented
- **Tests**: Basic LWW tested, but limited to simple cases
### ⚠️ Partial Implementations
1. **Schemas**
- **Spec**: Templates for object compilation with property specification
   - **Implementation**: `TypedCollection<T>` provides a thin typing layer
- **Tests**: No schema validation or constraint testing
2. **Negation**
- **Spec**: Specific delta type with "negates" pointer
- **Implementation**: Not explicitly implemented
- **Tests**: No negation tests
3. **Transactions**
- **Spec**: Not explicitly mentioned but implied by delta grouping
- **Implementation**: Transaction structure exists in types
- **Tests**: Transaction filtering marked as TODO
### ❌ Missing Implementations
1. **Query System**
- **Spec**: JSON Logic expressions for filtering
- **Implementation**: Types exist but no implementation
- **Tests**: All query tests are skipped
2. **Relational Features**
- **Spec**: Schema-based relationships between objects
- **Implementation**: `collection-relational.ts` exists but minimal
- **Tests**: All relational tests are skipped
3. **Advanced Conflict Resolution**
- **Spec**: Multiple resolution strategies (min/max/average for numerics)
- **Implementation**: Only LWW implemented
- **Tests**: No tests for alternative strategies
4. **Nested Object Resolution**
- **Spec**: Schema-controlled depth limiting to prevent infinite recursion
- **Implementation**: Not implemented
- **Tests**: No tests for nested object handling
## Test Coverage Gaps
### Critical Missing Tests
1. **Delta Validation**
- No tests for invalid delta structures
- No tests for required field validation
- No tests for pointer consistency
2. **Schema Enforcement**
- No tests for schema validation during view generation
- No tests for property type enforcement
- No tests for nested schema application
3. **Concurrent Operations**
- No tests for concurrent delta creation
- No tests for timestamp-based ordering edge cases
- No tests for distributed conflict scenarios
4. **Network Resilience**
- Limited peer connection testing
- No tests for network partitions
- No tests for delta propagation failures
### Performance and Scale
1. **Large Dataset Handling**
- No tests for entities with thousands of deltas
- No tests for memory efficiency of views
- No tests for query performance on large collections
2. **View Materialization**
- No tests for incremental view updates
- No tests for view caching strategies
- No tests for partial view generation
## Recommendations
### High Priority
1. **Implement Query System**: The skipped query tests suggest this is a planned feature
2. **Add Schema Validation**: Essential for data integrity in distributed systems
3. **Expand Conflict Resolution**: Implement numeric aggregation strategies
4. **Test Edge Cases**: Add validation, error handling, and concurrent operation tests
### Medium Priority
1. **Implement Negation**: Core spec concept currently missing
2. **Add Nested Object Handling**: Prevent infinite recursion with schema depth limits
3. **Enhance Transaction Support**: Complete transaction-based filtering
4. **Improve Network Testing**: Add resilience and partition tolerance tests
### Low Priority
1. **Performance Benchmarks**: Add tests for scale and efficiency
2. **Advanced CRDT Features**: Implement vector clocks or hybrid logical clocks
3. **View Optimization**: Add incremental update mechanisms
## Conclusion
The rhizome-node implementation successfully captures the core concepts of the spec but requires significant work to achieve full compliance. The foundation is solid, with the delta/lossless/lossy pipeline working as designed. However, advanced features like queries, schemas, and sophisticated conflict resolution remain unimplemented. The test suite would benefit from expanded coverage of edge cases, validation, and distributed system scenarios.

18
spec.md Normal file
View File

@ -0,0 +1,18 @@
* A `delta` is an immutable atomic unit that relates one or more values in semantically meaningful ways as of some point in time. A delta can be thought of as both a `CRDT` and as a `hyper-edge` in the implicit hypergraph that makes up the rhizome. A delta contains one or more `pointers`.
* A `pointer` is composed of at least two and possibly three fields. A `delta` contains a set of `pointers`. The fields of a pointer are:
* `name` - identifies the meaning of the pointer from the perspective of the delta that contains it.
* `target` - identifies a `value` to associate with the `name`.
* `context` - optionally, when pointing at an `object`, the `context` identifies the field or property of that object with which this delta is associated.
* A `value` is one of two kinds of primitive that can be referred to by a `delta`:
* a `reference` is a UUID or other value understood to be pointing at either a `delta` or an `object`.
* a `primitive` is a literal string, number or boolean value whose meaning is not tied up in its being a reference to a larger whole.
* An `object` is a composite object whose entire existence is encoded as the set of deltas that reference it. An object is identified by a unique `reference`, and every delta that includes that `reference` is asserting a claim about some property of that object.
* A `negation` is a specific kind of delta that includes a pointer with the name `negates`, a `target` reference to another delta, and a `context` called `negated_by`.
* A `schema` represents a template by which an `object` can be compiled into a `lossless view`. A schema specifies which properties of that object are included, and it specifies schemas for the objects referenced by the deltas within those properties. A schema must terminate in primitive schemas to avoid an infinite regress.
* For instance, a `lossless view` "User" of a user may include references to friends. If those friends are in turn encoded as instances of the "User" schema then all of *their* friends would be fully encoded, etc.
* This could lead to circular references and arbitrarily deep nesting, which runs into the problem of "returning the entire graph". So our schema should specify, for instance, that the "friends" field apply the "Summary" schema to referenced users rather than the "User" schema, where the "Summary" schema simply resolves to username and photo.
* A `lossless view` is a representation of an `object` that includes a full inventory of all of the deltas that compose that object. So for instance, a lossless view of the object representing the user "Alice" might include `alice.name`, which contains an array of all deltas with a pointer whose `target` is the ID of Alice and whose context is `name`. Such deltas would likely include a second pointer with the name `name` and the target a primitive string "Alice", for instance.
* A `lossless view` may also include nested delta/object layering. Consider `alice.friends`, which would include all deltas asserting friendship between Alice and some other person. Each such delta would reference a different friend object. In a lossless view, these references would be expanded to contain lossless views of those friends. Schemas, as defined above, would be applied to constrain tree depth and avoid infinite regress.
* A `lossy view` is a compression of a `lossless view` that removes delta information and flattens the structure into a standard domain object, typically in JSON. So instead of `alice.name` resolving to a list of deltas that assert the object's name it might simply resolve to `"alice"`.
* Note that in a lossless view any property of an object necessarily resolves to a set of deltas, even if it's an empty set, because we cannot anticipate how many deltas exist that assert values on that context.
* In collapsing a lossless view into a lossy view we may specify `resolution strategies` on each field of the schema. A resolution strategy takes as input the set of all deltas targeting that context and returns as output the value in the lossy view. So if we have 15 deltas asserting the value for an object's name, our resolution strategy may simply say "return the target of the `name` pointer associated with the most recent delta", or it may say "return an array of names". If the value is numeric it may say "take the max" or "take the min" or "take the average".
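As a purely illustrative sketch (not part of the spec), a delta asserting an object's name, its lossless view, and one possible lossy resolution might look roughly like this, using the implementation's V1 pointer fields (`localContext`/`target`/`targetContext`) with shapes simplified:
```typescript
// A delta asserting that the object "alice" has the name "Alice".
const nameDelta = {
  id: 'delta-1',
  timeCreated: 1718000000000,
  host: 'host-1',
  creator: 'user-1',
  pointers: [
    { localContext: 'named', target: 'alice', targetContext: 'name' }, // reference into alice.name
    { localContext: 'name', target: 'Alice' }                          // the primitive value
  ]
};

// Lossless view: every property resolves to the set of deltas asserting it.
const losslessAlice = {
  id: 'alice',
  propertyDeltas: { name: [nameDelta] }
};

// Lossy view after applying a last-write-wins resolution strategy to `name`.
const lossyAlice = { id: 'alice', name: 'Alice' };
```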

View File

@ -1,11 +1,11 @@
import Debug from 'debug';
import {randomUUID} from "node:crypto";
import EventEmitter from "node:events";
import {Delta} from "./delta";
import {Entity, EntityProperties} from "./entity";
import {ResolvedViewOne} from './last-write-wins';
import {RhizomeNode} from "./node";
import {DomainEntityID} from "./types";
import {Delta} from "../core/delta";
import {Entity, EntityProperties} from "../core/entity";
import {ResolvedViewOne} from '../views/resolvers/last-write-wins';
import {RhizomeNode} from "../node";
import {DomainEntityID} from "../core/types";
const debug = Debug('rz:abstract-collection');
export abstract class Collection<View> {

View File

@ -3,11 +3,11 @@
// It should enable operations like removing a property removes the value from the entities in the collection
// It could then be further extended with e.g. table semantics like filter, sort, join
import {Collection} from './collection-abstract';
import {LastWriteWins, ResolvedViewOne} from './last-write-wins';
import {Collection} from '../collections/collection-abstract';
import {LastWriteWins, ResolvedViewOne} from '../views/resolvers/last-write-wins';
export class BasicCollection extends Collection<LastWriteWins> {
lossy?: LastWriteWins;
declare lossy?: LastWriteWins;
initializeView() {
if (!this.rhizomeNode) throw new Error('not connected to rhizome');

View File

@ -1,11 +1,11 @@
import {Collection} from "./collection-abstract";
import {LastWriteWins, ResolvedViewOne} from "./last-write-wins";
import {LastWriteWins, ResolvedViewOne} from "../views/resolvers/last-write-wins";
class RelationalView extends LastWriteWins {
}
export class RelationalCollection extends Collection<RelationalView> {
lossy?: RelationalView;
declare lossy?: RelationalView;
initializeView() {
if (!this.rhizomeNode) throw new Error('not connected to rhizome');

View File

@ -0,0 +1,248 @@
import Debug from 'debug';
import { Collection } from '../collections/collection-abstract';
import { LastWriteWins, ResolvedViewOne } from '../views/resolvers/last-write-wins';
import {
ObjectSchema,
SchemaValidationResult,
SchemaAppliedView,
TypedCollection,
SchemaApplicationOptions
} from '../schema/schema';
import { DefaultSchemaRegistry } from '../schema/schema-registry';
import { LosslessViewOne } from '../views/lossless';
import { DomainEntityID, PropertyTypes } from '../core/types';
import { EntityProperties } from '../core/entity';
const debug = Debug('rz:typed-collection');
export class SchemaValidationError extends Error {
constructor(message: string, public validationResult: SchemaValidationResult) {
super(message);
this.name = 'SchemaValidationError';
}
}
export class TypedCollectionImpl<T extends Record<string, unknown>>
extends Collection<LastWriteWins>
implements TypedCollection<T> {
schema: ObjectSchema;
private schemaRegistry: DefaultSchemaRegistry;
private applicationOptions: SchemaApplicationOptions;
constructor(
name: string,
schema: ObjectSchema,
schemaRegistry: DefaultSchemaRegistry,
options: SchemaApplicationOptions = {}
) {
super(name);
this.schema = schema;
this.schemaRegistry = schemaRegistry;
this.applicationOptions = {
maxDepth: 3,
includeMetadata: true,
strictValidation: false,
...options
};
// Register the schema if not already registered
if (!this.schemaRegistry.get(schema.id)) {
this.schemaRegistry.register(schema);
}
debug(`Created typed collection '${name}' with schema '${schema.id}'`);
}
initializeView(): void {
if (!this.rhizomeNode) throw new Error('not connected to rhizome');
this.lossy = new LastWriteWins(this.rhizomeNode.lossless);
}
resolve(id: string): ResolvedViewOne | undefined {
if (!this.rhizomeNode) throw new Error('collection not connected to rhizome');
if (!this.lossy) throw new Error('lossy view not initialized');
const res = this.lossy.resolve([id]) || {};
return res[id];
}
// Validate an entity against the schema
validate(entity: T): SchemaValidationResult {
// Convert entity to a mock lossless view for validation
const mockLosslessView: LosslessViewOne = {
id: 'validation-mock',
referencedAs: [],
propertyDeltas: {}
};
// Create mock deltas for each property
for (const [key, value] of Object.entries(entity)) {
if (value !== undefined) {
mockLosslessView.propertyDeltas[key] = [{
id: 'mock-delta',
timeCreated: Date.now(),
host: 'validation',
creator: 'validation',
pointers: [{ [key]: value as PropertyTypes }]
}];
}
}
return this.schemaRegistry.validate('validation-mock', this.schema.id, mockLosslessView);
}
// Apply schema to a lossless view
apply(view: LosslessViewOne): SchemaAppliedView {
return this.schemaRegistry.applySchema(view, this.schema.id, this.applicationOptions);
}
// Get a schema-validated view of an entity
getValidatedView(entityId: DomainEntityID): SchemaAppliedView | undefined {
if (!this.rhizomeNode) throw new Error('collection not connected to rhizome');
const losslessView = this.rhizomeNode.lossless.view([entityId])[entityId];
if (!losslessView) return undefined;
return this.apply(losslessView);
}
// Get all entities in this collection with schema validation
getAllValidatedViews(): SchemaAppliedView[] {
if (!this.rhizomeNode) throw new Error('collection not connected to rhizome');
const entityIds = this.getIds();
const views: SchemaAppliedView[] = [];
for (const entityId of entityIds) {
const view = this.getValidatedView(entityId);
if (view) {
views.push(view);
}
}
return views;
}
// Override put to include schema validation
async put(
entityId: DomainEntityID | undefined,
properties: EntityProperties,
): Promise<ResolvedViewOne> {
// Validate against schema if strict validation is enabled
if (this.applicationOptions.strictValidation) {
const validationResult = this.validate(properties as T);
if (!validationResult.valid) {
throw new SchemaValidationError(
`Schema validation failed: ${validationResult.errors.map(e => e.message).join(', ')}`,
validationResult
);
}
}
// Call parent put method
const result = await super.put(entityId, properties);
// Log validation warnings if any
const validationResult = this.validate(properties as T);
if (validationResult.warnings.length > 0) {
debug(`Validation warnings for entity ${entityId}:`, validationResult.warnings);
}
return result;
}
// Get validation statistics for the collection
getValidationStats(): {
totalEntities: number;
validEntities: number;
invalidEntities: number;
entitiesWithWarnings: number;
commonErrors: Map<string, number>;
} {
const entityIds = this.getIds();
const stats = {
totalEntities: entityIds.length,
validEntities: 0,
invalidEntities: 0,
entitiesWithWarnings: 0,
commonErrors: new Map<string, number>()
};
for (const entityId of entityIds) {
if (!this.rhizomeNode) continue;
const losslessView = this.rhizomeNode.lossless.view([entityId])[entityId];
if (!losslessView) continue;
const validationResult = this.schemaRegistry.validate(entityId, this.schema.id, losslessView);
if (validationResult.valid) {
stats.validEntities++;
} else {
stats.invalidEntities++;
}
if (validationResult.warnings.length > 0) {
stats.entitiesWithWarnings++;
}
// Count common errors
for (const error of validationResult.errors) {
const count = stats.commonErrors.get(error.message) || 0;
stats.commonErrors.set(error.message, count + 1);
}
}
return stats;
}
// Filter entities by schema validation status
getValidEntities(): DomainEntityID[] {
if (!this.rhizomeNode) return [];
return this.getIds().filter(entityId => {
const losslessView = this.rhizomeNode!.lossless.view([entityId])[entityId];
if (!losslessView) return false;
const validationResult = this.schemaRegistry.validate(entityId, this.schema.id, losslessView);
return validationResult.valid;
});
}
getInvalidEntities(): Array<{ entityId: DomainEntityID; errors: string[] }> {
if (!this.rhizomeNode) return [];
const invalid: Array<{ entityId: DomainEntityID; errors: string[] }> = [];
for (const entityId of this.getIds()) {
const losslessView = this.rhizomeNode.lossless.view([entityId])[entityId];
if (!losslessView) continue;
const validationResult = this.schemaRegistry.validate(entityId, this.schema.id, losslessView);
if (!validationResult.valid) {
invalid.push({
entityId,
errors: validationResult.errors.map(e => e.message)
});
}
}
return invalid;
}
// Schema introspection
getSchemaInfo(): {
schema: ObjectSchema;
dependencies: string[];
hasCircularDependencies: boolean;
} {
const dependencies = this.schemaRegistry.getDependencyGraph().get(this.schema.id) || new Set();
return {
schema: this.schema,
dependencies: Array.from(dependencies),
hasCircularDependencies: this.schemaRegistry.hasCircularDependencies()
};
}
}

4
src/collections/index.ts Normal file
View File

@ -0,0 +1,4 @@
export * from './collection-abstract';
export * from './collection-basic';
export * from './collection-typed';
export * from './collection-relational';

View File

@ -4,6 +4,10 @@ import {randomUUID} from "crypto";
// _ADDR refers to the interface address from the service's perspective
export const LEVEL_DB_DIR = process.env.RHIZOME_LEVEL_DB_DIR ?? './data';
// Storage configuration
export const STORAGE_TYPE = process.env.RHIZOME_STORAGE_TYPE || 'memory'; // 'memory' | 'leveldb' | 'sqlite' | 'postgres'
export const STORAGE_PATH = process.env.RHIZOME_STORAGE_PATH || './data/rhizome';
export const CREATOR = process.env.USER!;
export const PEER_ID = process.env.RHIZOME_PEER_ID || randomUUID();
export const ADDRESS = process.env.RHIZOME_ADDRESS ?? 'localhost';

View File

@ -2,7 +2,7 @@
// So we want it to be fluent to express these in the local context,
// and propagated as deltas in a configurable manner; i.e. configurable batches or immediate
// import {Delta} from './types';
// import {Delta} from '../core/types';
export class Entity {
}

View File

@ -1,13 +1,14 @@
import {randomUUID} from "crypto";
import Debug from 'debug';
import microtime from 'microtime';
import {PeerAddress} from "./peers";
import {PeerAddress} from "../network/peers";
import {CreatorID, DomainEntityID, HostID, PropertyID, Timestamp, TransactionID} from "./types";
import {validateDeltaNetworkImageV1, validateDeltaNetworkImageV2} from "../features/delta-validation";
const debug = Debug('rz:delta');
export type DeltaID = string;
export type PointerTarget = string | number | null;
export type PointerTarget = string | number | boolean | null;
type PointerV1 = {
localContext: string;
@ -15,7 +16,7 @@ type PointerV1 = {
targetContext?: string;
};
export type Scalar = string | number | null;
export type Scalar = string | number | boolean | null;
export type Reference = {
[key: DomainEntityID]: PropertyID
};
@ -75,6 +76,7 @@ export class DeltaV1 extends DeltaNetworkImageV1 {
}
static fromNetworkImage(delta: DeltaNetworkImageV1) {
validateDeltaNetworkImageV1(delta);
return new DeltaV1(delta);
}
}
@ -98,6 +100,7 @@ export class DeltaV2 extends DeltaNetworkImageV2 {
}
static fromNetworkImage(delta: DeltaNetworkImageV2) {
validateDeltaNetworkImageV2(delta);
return new DeltaV2(delta);
}

4
src/core/index.ts Normal file
View File

@ -0,0 +1,4 @@
export * from './delta';
export * from './types';
export * from './context';
export { Entity } from './entity';

View File

@ -4,7 +4,7 @@ export type FilterExpr = JSONLogic;
export type FilterGenerator = () => FilterExpr;
export type PropertyTypes = string | number | null;
export type PropertyTypes = string | number | boolean | null;
export type DomainEntityID = string;
export type PropertyID = string;

View File

@ -0,0 +1,188 @@
import { DeltaID, PointerTarget, DeltaNetworkImageV1, DeltaNetworkImageV2, PointersV2 } from "../core/delta";
import { CreatorID, HostID, Timestamp } from "../core/types";
// Custom error types for delta operations
export class DeltaValidationError extends Error {
constructor(message: string, public field?: string) {
super(message);
this.name = "DeltaValidationError";
}
}
export class InvalidDeltaFormatError extends DeltaValidationError {
constructor(message: string, field?: string) {
super(message, field);
this.name = "InvalidDeltaFormatError";
}
}
export class MissingRequiredFieldError extends DeltaValidationError {
constructor(field: string) {
super(`Missing required field: ${field}`, field);
this.name = "MissingRequiredFieldError";
}
}
export class InvalidPointerError extends DeltaValidationError {
constructor(message: string, pointerIndex?: number) {
super(message, pointerIndex !== undefined ? `pointer[${pointerIndex}]` : undefined);
this.name = "InvalidPointerError";
}
}
// Validation functions
export function validateDeltaId(id: unknown): id is DeltaID {
if (typeof id !== "string" || id.length === 0) {
throw new InvalidDeltaFormatError("Delta ID must be a non-empty string", "id");
}
return true;
}
export function validateTimestamp(timestamp: unknown, field: string): timestamp is Timestamp {
if (typeof timestamp !== "number" || timestamp <= 0) {
throw new InvalidDeltaFormatError(`${field} must be a positive number`, field);
}
return true;
}
export function validateHostId(host: unknown): host is HostID {
if (typeof host !== "string" || host.length === 0) {
throw new InvalidDeltaFormatError("Host ID must be a non-empty string", "host");
}
return true;
}
export function validateCreatorId(creator: unknown): creator is CreatorID {
if (typeof creator !== "string" || creator.length === 0) {
throw new InvalidDeltaFormatError("Creator ID must be a non-empty string", "creator");
}
return true;
}
export function validatePointerTarget(target: unknown): target is PointerTarget {
if (target !== null && typeof target !== "string" && typeof target !== "number" && typeof target !== "boolean") {
throw new InvalidPointerError("Pointer target must be string, number, boolean, or null");
}
return true;
}
export function validatePointerV1(pointer: unknown, index: number): pointer is { localContext: string; target: PointerTarget; targetContext?: string } {
if (!pointer || typeof pointer !== "object" || Array.isArray(pointer)) {
throw new InvalidPointerError(`Pointer at index ${index} must be an object`, index);
}
const p = pointer as Record<string, unknown>;
if (typeof p.localContext !== "string" || p.localContext.length === 0) {
throw new InvalidPointerError(`Pointer at index ${index} must have a non-empty localContext`, index);
}
validatePointerTarget(p.target);
if (p.targetContext !== undefined &&
(typeof p.targetContext !== "string" || p.targetContext.length === 0)) {
throw new InvalidPointerError(`Pointer at index ${index} targetContext must be a non-empty string if present`, index);
}
// Validate pointer consistency: if targetContext exists, target must be a string (reference)
if (p.targetContext && typeof p.target !== "string") {
throw new InvalidPointerError(`Pointer at index ${index} with targetContext must have string target (reference)`, index);
}
return true;
}
export function validatePointersV1(pointers: unknown): pointers is Array<{ localContext: string; target: PointerTarget; targetContext?: string }> {
if (!Array.isArray(pointers)) {
throw new InvalidDeltaFormatError("Pointers must be an array", "pointers");
}
if (pointers.length === 0) {
throw new InvalidDeltaFormatError("Delta must have at least one pointer", "pointers");
}
(pointers as unknown[]).forEach((pointer, index) => validatePointerV1(pointer, index));
return true;
}
export function validatePointersV2(pointers: unknown): pointers is PointersV2 {
if (!pointers || typeof pointers !== "object" || Array.isArray(pointers)) {
throw new InvalidDeltaFormatError("Pointers must be an object", "pointers");
}
const keys = Object.keys(pointers);
if (keys.length === 0) {
throw new InvalidDeltaFormatError("Delta must have at least one pointer", "pointers");
}
for (const [key, value] of Object.entries(pointers)) {
if (key.length === 0) {
throw new InvalidPointerError("Pointer key must be a non-empty string");
}
if (value !== null && typeof value !== "string" && typeof value !== "number" && typeof value !== "boolean" && typeof value !== "object") {
throw new InvalidPointerError(`Invalid pointer value for key '${key}'`);
}
// If value is an object (Reference), validate it
if (value && typeof value === "object") {
const refKeys = Object.keys(value);
if (refKeys.length !== 1) {
throw new InvalidPointerError(`Reference pointer '${key}' must have exactly one key-value pair`);
}
const [refKey, refValue] = Object.entries(value)[0];
if (typeof refKey !== "string" || refKey.length === 0) {
throw new InvalidPointerError(`Reference key in pointer '${key}' must be a non-empty string`);
}
if (typeof refValue !== "string" || refValue.length === 0) {
throw new InvalidPointerError(`Reference value in pointer '${key}' must be a non-empty string`);
}
}
}
return true;
}
export function validateDeltaNetworkImageV1(delta: unknown): delta is DeltaNetworkImageV1 {
if (!delta || typeof delta !== "object" || Array.isArray(delta)) {
throw new InvalidDeltaFormatError("Delta must be an object");
}
// Check required fields
if (!("id" in delta)) throw new MissingRequiredFieldError("id");
if (!("timeCreated" in delta)) throw new MissingRequiredFieldError("timeCreated");
if (!("host" in delta)) throw new MissingRequiredFieldError("host");
if (!("creator" in delta)) throw new MissingRequiredFieldError("creator");
if (!("pointers" in delta)) throw new MissingRequiredFieldError("pointers");
// Validate field types
validateDeltaId(delta.id);
validateTimestamp(delta.timeCreated, "timeCreated");
validateHostId(delta.host);
validateCreatorId(delta.creator);
validatePointersV1(delta.pointers);
return true;
}
export function validateDeltaNetworkImageV2(delta: unknown): delta is DeltaNetworkImageV2 {
if (!delta || typeof delta !== "object" || Array.isArray(delta)) {
throw new InvalidDeltaFormatError("Delta must be an object");
}
// Check required fields
if (!("id" in delta)) throw new MissingRequiredFieldError("id");
if (!("timeCreated" in delta)) throw new MissingRequiredFieldError("timeCreated");
if (!("host" in delta)) throw new MissingRequiredFieldError("host");
if (!("creator" in delta)) throw new MissingRequiredFieldError("creator");
if (!("pointers" in delta)) throw new MissingRequiredFieldError("pointers");
// Validate field types
validateDeltaId(delta.id);
validateTimestamp(delta.timeCreated, "timeCreated");
validateHostId(delta.host);
validateCreatorId(delta.creator);
validatePointersV2(delta.pointers);
return true;
}

3
src/features/index.ts Normal file
View File

@ -0,0 +1,3 @@
export * from './negation';
export * from './transactions';
export * from './delta-validation';

209
src/features/negation.ts Normal file
View File

@ -0,0 +1,209 @@
import Debug from 'debug';
import { Delta, DeltaID } from '../core/delta';
import { CreatorID, HostID } from '../core/types';
const debug = Debug('rz:negation');
// Negation-specific types
export interface NegationPointer {
localContext: 'negates';
target: DeltaID;
targetContext: 'negated_by';
}
export interface NegationDelta extends Delta {
isNegation: true;
negatedDeltaId: DeltaID;
}
// Helper functions for creating and identifying negation deltas
export class NegationHelper {
/**
* Create a negation delta that negates another delta
*/
static createNegation(
deltaToNegate: DeltaID,
creator: CreatorID,
host: HostID
): NegationDelta {
const negationDelta = new Delta({
creator,
host,
pointers: [{
localContext: 'negates',
target: deltaToNegate,
targetContext: 'negated_by'
}]
}) as NegationDelta;
negationDelta.isNegation = true;
negationDelta.negatedDeltaId = deltaToNegate;
debug(`Created negation delta ${negationDelta.id} negating ${deltaToNegate}`);
return negationDelta;
}
/**
* Check if a delta is a negation delta
*/
static isNegationDelta(delta: Delta): delta is NegationDelta {
return delta.pointers.some(pointer =>
pointer.localContext === 'negates' &&
pointer.targetContext === 'negated_by'
);
}
/**
* Extract the negated delta ID from a negation delta
*/
static getNegatedDeltaId(negationDelta: Delta): DeltaID | null {
const negationPointer = negationDelta.pointers.find(pointer =>
pointer.localContext === 'negates' &&
pointer.targetContext === 'negated_by'
);
if (negationPointer && typeof negationPointer.target === 'string') {
return negationPointer.target;
}
return null;
}
/**
* Find all negation deltas that negate a specific delta
*/
static findNegationsFor(targetDeltaId: DeltaID, deltas: Delta[]): NegationDelta[] {
return deltas
.filter(delta => this.isNegationDelta(delta))
.filter(delta => this.getNegatedDeltaId(delta) === targetDeltaId) as NegationDelta[];
}
/**
* Check if a delta is negated by any negation deltas
*/
static isDeltaNegated(deltaId: DeltaID, deltas: Delta[]): boolean {
return this.findNegationsFor(deltaId, deltas).length > 0;
}
/**
* Filter out negated deltas from a list
* Returns deltas that are not negated by any negation deltas in the list
*/
static filterNegatedDeltas(deltas: Delta[]): Delta[] {
const negatedDeltaIds = new Set<DeltaID>();
// First pass: collect all negated delta IDs
for (const delta of deltas) {
if (this.isNegationDelta(delta)) {
const negatedId = this.getNegatedDeltaId(delta);
if (negatedId) {
negatedDeltaIds.add(negatedId);
}
}
}
// Second pass: filter out negated deltas and negation deltas themselves
return deltas.filter(delta => {
// Exclude negation deltas themselves (they're metadata)
if (this.isNegationDelta(delta)) {
return false;
}
// Exclude deltas that have been negated
if (negatedDeltaIds.has(delta.id)) {
debug(`Filtering out negated delta ${delta.id}`);
return false;
}
return true;
});
}
/**
* Get negation statistics for a list of deltas
*/
static getNegationStats(deltas: Delta[]): {
totalDeltas: number;
negationDeltas: number;
negatedDeltas: number;
effectiveDeltas: number;
negatedDeltaIds: DeltaID[];
negationMap: Map<DeltaID, DeltaID[]>; // negated -> [negating deltas]
} {
const negationDeltas = deltas.filter(d => this.isNegationDelta(d));
const negatedDeltaIds = new Set<DeltaID>();
const negationMap = new Map<DeltaID, DeltaID[]>();
for (const negDelta of negationDeltas) {
const negatedId = this.getNegatedDeltaId(negDelta);
if (negatedId) {
negatedDeltaIds.add(negatedId);
if (!negationMap.has(negatedId)) {
negationMap.set(negatedId, []);
}
negationMap.get(negatedId)!.push(negDelta.id);
}
}
const effectiveDeltas = deltas.length - negationDeltas.length - negatedDeltaIds.size;
return {
totalDeltas: deltas.length,
negationDeltas: negationDeltas.length,
negatedDeltas: negatedDeltaIds.size,
effectiveDeltas,
negatedDeltaIds: Array.from(negatedDeltaIds),
negationMap
};
}
/**
* Apply negations to a delta stream in chronological order
* Later negations can override earlier ones
*/
static applyNegationsChronologically(deltas: Delta[]): Delta[] {
// Sort by timestamp to apply negations in order
const sortedDeltas = [...deltas].sort((a, b) => a.timeCreated - b.timeCreated);
const negatedIds = new Set<DeltaID>();
const unnegatedIds = new Set<DeltaID>();
// Process deltas in chronological order
for (const delta of sortedDeltas) {
if (this.isNegationDelta(delta)) {
const negatedId = this.getNegatedDeltaId(delta);
if (negatedId) {
negatedIds.add(negatedId);
unnegatedIds.delete(negatedId); // Remove from unnegated if it was there
debug(`Chronologically negated delta ${negatedId} at time ${delta.timeCreated}`);
}
} else {
// If this delta was previously negated, it might be reinstated by this newer delta
if (negatedIds.has(delta.id)) {
// Check if there are any negations after this delta's timestamp
const laterNegations = sortedDeltas
.filter(d => d.timeCreated > delta.timeCreated)
.filter(d => this.isNegationDelta(d))
.filter(d => this.getNegatedDeltaId(d) === delta.id);
if (laterNegations.length === 0) {
unnegatedIds.add(delta.id);
negatedIds.delete(delta.id);
}
}
}
}
// Filter based on final negation state
return deltas.filter(delta => {
if (this.isNegationDelta(delta)) {
return false; // Remove negation deltas from final result
}
return !negatedIds.has(delta.id);
});
}
}
// Export a singleton instance for convenience
export const Negation = NegationHelper;

View File

@ -1,8 +1,8 @@
import Debug from "debug";
import EventEmitter from "events";
import {Delta, DeltaID} from "./delta";
import {Lossless} from "./lossless";
import {DomainEntityID, TransactionID} from "./types";
import {Delta, DeltaID} from "../core/delta";
import {Lossless} from "../views/lossless";
import {DomainEntityID, TransactionID} from "../core/types";
const debug = Debug('rz:transactions');
function getDeltaTransactionId(delta: Delta): TransactionID | undefined {

View File

@ -1,12 +1,16 @@
import express, {Router} from "express";
import {Collection} from "../collection-abstract";
import {Delta} from "../delta";
import {Collection} from "../collections";
import {Delta, DeltaFilter} from "../core";
import {RhizomeNode} from "../node";
export class HttpApi {
router = Router();
constructor(readonly rhizomeNode: RhizomeNode) {
this.setupRoutes();
}
private setupRoutes() {
// --------------- deltas ----------------
// Serve list of all deltas accepted
@ -55,6 +59,10 @@ export class HttpApi {
this.router.get("/peers/count", (_req: express.Request, res: express.Response) => {
res.json(this.rhizomeNode.peers.peers.length);
});
// Initialize lossless and query endpoints
this.serveLossless();
this.serveQuery();
}
// serveCollection<T extends Collection>(collection: T) {
@ -141,4 +149,116 @@ export class HttpApi {
});
});
}
serveQuery() {
// Query entities by schema with optional JSON Logic filter
this.router.post('/query/:schemaId', async (req: express.Request, res: express.Response) => {
try {
const { schemaId } = req.params;
const { filter, maxResults, deltaFilter } = req.body;
const options: { maxResults?: number; deltaFilter?: DeltaFilter } = {};
if (maxResults) options.maxResults = maxResults;
if (deltaFilter) {
// Note: deltaFilter would need to be serialized/deserialized properly in a real implementation
console.warn('deltaFilter not supported in HTTP API yet');
}
const result = await this.rhizomeNode.queryEngine.query(schemaId, filter, options);
res.json({
success: true,
data: result
});
} catch (error) {
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
});
// Get a single entity by ID with schema validation
this.router.get('/query/:schemaId/:entityId', async (req: express.Request, res: express.Response) => {
try {
const { schemaId, entityId } = req.params;
const result = await this.rhizomeNode.queryEngine.queryOne(schemaId, entityId);
if (result) {
res.json({
success: true,
data: result
});
} else {
res.status(404).json({
success: false,
error: 'Entity not found or does not match schema'
});
}
} catch (error) {
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
});
// Get query engine statistics
this.router.get('/query/stats', (_req: express.Request, res: express.Response) => {
try {
const stats = this.rhizomeNode.queryEngine.getStats();
res.json({
success: true,
data: stats
});
} catch (error) {
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
});
// List all registered schemas
this.router.get('/schemas', (_req: express.Request, res: express.Response) => {
try {
const schemas = this.rhizomeNode.schemaRegistry.list();
res.json({
success: true,
data: schemas
});
} catch (error) {
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
});
// Get a specific schema
this.router.get('/schemas/:schemaId', (req: express.Request, res: express.Response) => {
try {
const { schemaId } = req.params;
const schema = this.rhizomeNode.schemaRegistry.get(schemaId);
if (schema) {
res.json({
success: true,
data: schema
});
} else {
res.status(404).json({
success: false,
error: 'Schema not found'
});
}
} catch (error) {
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
});
}
}

32
src/index.ts Normal file
View File

@ -0,0 +1,32 @@
// Core exports
export * from './core';
// Views exports
export * from './views';
// Collections exports
export { Collection, BasicCollection, RelationalCollection as CollectionRelational, TypedCollectionImpl, SchemaValidationError as CollectionSchemaValidationError } from './collections';
// Features exports
export * from './features';
// Schema exports
export * from './schema';
// Storage exports
export * from './storage';
// Network exports
export * from './network';
// Query exports
export * from './query';
// HTTP exports
export * from './http';
// Configuration
export * from './config';
// Main node
export * from './node';

View File

@ -1,196 +0,0 @@
// Deltas target entities.
// We can maintain a record of all the targeted entities, and the deltas that targeted them
import Debug from 'debug';
import EventEmitter from 'events';
import {Delta, DeltaFilter, DeltaID, DeltaNetworkImageV1} from './delta';
import {RhizomeNode} from './node';
import {Transactions} from './transactions';
import {DomainEntityID, PropertyID, PropertyTypes, TransactionID, ViewMany} from "./types";
const debug = Debug('rz:lossless');
export type CollapsedPointer = {[key: PropertyID]: PropertyTypes};
export type CollapsedDelta = Omit<DeltaNetworkImageV1, 'pointers'> & {
pointers: CollapsedPointer[];
};
export type LosslessViewOne = {
id: DomainEntityID,
referencedAs: string[];
propertyDeltas: {
[key: PropertyID]: CollapsedDelta[]
}
};
export type LosslessViewMany = ViewMany<LosslessViewOne>;
class LosslessEntityMap extends Map<DomainEntityID, LosslessEntity> {};
class LosslessEntity {
properties = new Map<PropertyID, Set<Delta>>();
constructor(readonly lossless: Lossless, readonly id: DomainEntityID) {}
addDelta(delta: Delta) {
const targetContexts = delta.pointers
.filter(({target}) => target === this.id)
.map(({targetContext}) => targetContext)
.filter((targetContext) => typeof targetContext === 'string');
for (const targetContext of targetContexts) {
let propertyDeltas = this.properties.get(targetContext);
if (!propertyDeltas) {
propertyDeltas = new Set<Delta>();
this.properties.set(targetContext, propertyDeltas);
}
propertyDeltas.add(delta);
debug(`[${this.lossless.rhizomeNode.config.peerId}]`, `entity ${this.id} added delta:`, JSON.stringify(delta));
}
}
toJSON() {
const properties: {[key: PropertyID]: number} = {};
for (const [key, deltas] of this.properties.entries()) {
properties[key] = deltas.size;
}
return {
id: this.id,
properties
};
}
}
export class Lossless {
domainEntities = new LosslessEntityMap();
transactions: Transactions;
referencedAs = new Map<string, Set<DomainEntityID>>();
eventStream = new EventEmitter();
constructor(readonly rhizomeNode: RhizomeNode) {
this.transactions = new Transactions(this);
this.transactions.eventStream.on("completed", (transactionId, deltaIds) => {
debug(`[${this.rhizomeNode.config.peerId}]`, `Completed transaction ${transactionId}`);
const transaction = this.transactions.get(transactionId);
if (!transaction) return;
for (const id of transaction.entityIds) {
this.eventStream.emit("updated", id, deltaIds);
}
});
}
ingestDelta(delta: Delta): TransactionID | undefined {
const targets = delta.pointers
.filter(({targetContext}) => !!targetContext)
.map(({target}) => target)
.filter((target) => typeof target === 'string')
for (const target of targets) {
let ent = this.domainEntities.get(target);
if (!ent) {
ent = new LosslessEntity(this, target);
this.domainEntities.set(target, ent);
}
ent.addDelta(delta);
}
for (const {target, localContext} of delta.pointers) {
if (typeof target === "string" && this.domainEntities.has(target)) {
if (this.domainEntities.has(target)) {
let referencedAs = this.referencedAs.get(localContext);
if (!referencedAs) {
referencedAs = new Set<string>();
this.referencedAs.set(localContext, referencedAs);
}
referencedAs.add(target);
}
}
}
const transactionId = this.transactions.ingestDelta(delta, targets);
if (!transactionId) {
// No transaction -- we can issue an update event immediately
for (const id of targets) {
this.eventStream.emit("updated", id, [delta.id]);
}
}
return transactionId;
}
viewSpecific(entityId: DomainEntityID, deltaIds: DeltaID[], deltaFilter?: DeltaFilter): LosslessViewOne | undefined {
const combinedFilter = (delta: Delta) => {
if (!deltaIds.includes(delta.id)) {
debug(`[${this.rhizomeNode.config.peerId}]`, `Excluding delta ${delta.id} because it's not in the requested list of deltas`);
return false;
}
if (!deltaFilter) return true;
return deltaFilter(delta);
};
const res = this.view([entityId], (delta) => combinedFilter(delta));
return res[entityId];
}
view(entityIds?: DomainEntityID[], deltaFilter?: DeltaFilter): LosslessViewMany {
const view: LosslessViewMany = {};
entityIds = entityIds ?? Array.from(this.domainEntities.keys());
for (const id of entityIds) {
const ent = this.domainEntities.get(id);
if (!ent) continue;
const referencedAs = new Set<string>();
const propertyDeltas: {
[key: PropertyID]: CollapsedDelta[]
} = {};
for (const [key, deltas] of ent.properties.entries()) {
propertyDeltas[key] = propertyDeltas[key] || [];
for (const delta of deltas) {
if (deltaFilter && !deltaFilter(delta)) {
continue;
}
// If this delta is part of a transaction,
// we need to be able to wait for the whole transaction.
if (delta.transactionId) {
if (!this.transactions.isComplete(delta.transactionId)) {
// TODO: Test this condition
debug(`[${this.rhizomeNode.config.peerId}]`, `Excluding delta ${delta.id} because transaction ${delta.transactionId} is not completed`);
continue;
}
}
const pointers: CollapsedPointer[] = [];
for (const {localContext, target} of delta.pointers) {
pointers.push({[localContext]: target});
if (target === ent.id) {
referencedAs.add(localContext);
}
}
propertyDeltas[key].push({
...delta,
pointers
});
}
}
view[ent.id] = {
id: ent.id,
referencedAs: Array.from(referencedAs.values()),
propertyDeltas
};
}
return view;
}
// TODO: point-in-time queries
}

View File

@ -1,8 +1,8 @@
import Debug from 'debug';
import EventEmitter from 'node:events';
import objectHash from 'object-hash';
import {Delta} from './delta';
import {RhizomeNode} from './node';
import {Delta} from '../core/delta';
import {RhizomeNode} from '../node';
const debug = Debug('rz:deltas');
enum Decision {

4
src/network/index.ts Normal file
View File

@ -0,0 +1,4 @@
export * from './peers';
export * from './pub-sub';
export * from './request-reply';
export * from './delta-stream';

View File

@ -1,8 +1,8 @@
import Debug from 'debug';
import {Message} from 'zeromq';
import {Delta} from "./delta";
import {RhizomeNode} from "./node";
import {Subscription} from './pub-sub';
import {Delta} from "../core/delta";
import {RhizomeNode} from "../node";
import {Subscription} from '../network/pub-sub';
import {PeerRequest, RequestSocket, ResponseSocket} from "./request-reply";
const debug = Debug('rz:peers');

View File

@ -1,7 +1,7 @@
import Debug from 'debug';
import {Publisher, Subscriber} from 'zeromq';
import {RhizomeNode} from './node';
import {PeerAddress} from './peers';
import {RhizomeNode} from '../node';
import {PeerAddress} from '../network/peers';
const debug = Debug('rz:pub-sub');
export type SubscribedMessageHandler = (sender: PeerAddress, msg: string) => void;

View File

@ -1,8 +1,8 @@
import Debug from 'debug';
import {EventEmitter} from 'node:events';
import {Message, Reply, Request} from 'zeromq';
import {RhizomeNode} from './node';
import {PeerAddress, RequestMethods} from './peers';
import {RhizomeNode} from '../node';
import {PeerAddress, RequestMethods} from '../network/peers';
const debug = Debug('rz:request-reply');
export type PeerRequest = {

View File

@ -1,11 +1,11 @@
import Debug from 'debug';
import {CREATOR, HTTP_API_ADDR, HTTP_API_ENABLE, HTTP_API_PORT, PEER_ID, PUBLISH_BIND_ADDR, PUBLISH_BIND_HOST, PUBLISH_BIND_PORT, REQUEST_BIND_ADDR, REQUEST_BIND_HOST, REQUEST_BIND_PORT, SEED_PEERS} from './config';
import {DeltaStream} from './delta-stream';
import {CREATOR, HTTP_API_ADDR, HTTP_API_ENABLE, HTTP_API_PORT, PEER_ID, PUBLISH_BIND_ADDR, PUBLISH_BIND_HOST, PUBLISH_BIND_PORT, REQUEST_BIND_ADDR, REQUEST_BIND_HOST, REQUEST_BIND_PORT, SEED_PEERS, STORAGE_TYPE, STORAGE_PATH} from './config';
import {DeltaStream, parseAddressList, PeerAddress, Peers, PubSub, RequestReply} from './network';
import {HttpServer} from './http/index';
import {Lossless} from './lossless';
import {parseAddressList, PeerAddress, Peers} from './peers';
import {PubSub} from './pub-sub';
import {RequestReply} from './request-reply';
import {Lossless} from './views';
import {QueryEngine, StorageQueryEngine} from './query';
import {DefaultSchemaRegistry} from './schema';
import {DeltaQueryStorage, StorageFactory, StorageConfig} from './storage';
const debug = Debug('rz:rhizome-node');
export type RhizomeNodeConfig = {
@ -21,6 +21,7 @@ export type RhizomeNodeConfig = {
seedPeers: PeerAddress[];
peerId: string;
creator: string; // TODO each host should be able to support multiple users
storage?: StorageConfig; // Optional storage configuration
};
// So that we can run more than one instance in the same process (for testing)
@ -32,6 +33,10 @@ export class RhizomeNode {
deltaStream: DeltaStream;
lossless: Lossless;
peers: Peers;
queryEngine: QueryEngine;
storageQueryEngine: StorageQueryEngine;
schemaRegistry: DefaultSchemaRegistry;
deltaStorage: DeltaQueryStorage;
myRequestAddr: PeerAddress;
myPublishAddr: PeerAddress;
@ -49,6 +54,10 @@ export class RhizomeNode {
seedPeers: parseAddressList(SEED_PEERS),
peerId: PEER_ID,
creator: CREATOR,
storage: {
type: STORAGE_TYPE as 'memory' | 'leveldb',
path: STORAGE_PATH
},
...config
};
debug(`[${this.config.peerId}]`, 'Config', this.config);
@ -66,11 +75,29 @@ export class RhizomeNode {
this.deltaStream = new DeltaStream(this);
this.peers = new Peers(this);
this.lossless = new Lossless(this);
this.schemaRegistry = new DefaultSchemaRegistry();
// Initialize storage backend
this.deltaStorage = StorageFactory.create(this.config.storage!);
// Initialize query engines (both lossless-based and storage-based)
this.queryEngine = new QueryEngine(this.lossless, this.schemaRegistry);
this.storageQueryEngine = new StorageQueryEngine(this.deltaStorage, this.schemaRegistry);
}
async start(syncOnStart = false) {
// Connect our lossless view to the delta stream
this.deltaStream.subscribeDeltas((delta) => this.lossless.ingestDelta(delta));
this.deltaStream.subscribeDeltas(async (delta) => {
// Ingest into lossless view
this.lossless.ingestDelta(delta);
// Also store in persistent storage
try {
await this.deltaStorage.storeDelta(delta);
} catch (error) {
debug(`[${this.config.peerId}]`, 'Error storing delta to persistent storage:', error);
}
});
// Bind ZeroMQ publish socket
// TODO: Config option to enable zmq pubsub
@ -111,6 +138,44 @@ export class RhizomeNode {
await this.pubSub.stop();
await this.requestReply.stop();
await this.httpServer.stop();
// Close storage
try {
await this.deltaStorage.close();
debug(`[${this.config.peerId}]`, 'Storage closed');
} catch (error) {
debug(`[${this.config.peerId}]`, 'Error closing storage:', error);
}
debug(`[${this.config.peerId}]`, 'Stopped');
}
/**
* Sync existing lossless view data to persistent storage
* Useful for migrating from memory-only to persistent storage
*/
async syncToStorage(): Promise<void> {
debug(`[${this.config.peerId}]`, 'Syncing lossless view to storage');
const allDeltas = this.deltaStream.deltasAccepted;
let synced = 0;
for (const delta of allDeltas) {
try {
await this.deltaStorage.storeDelta(delta);
synced++;
} catch (error) {
debug(`[${this.config.peerId}]`, `Error syncing delta ${delta.id}:`, error);
}
}
debug(`[${this.config.peerId}]`, `Synced ${synced}/${allDeltas.length} deltas to storage`);
}
/**
* Get storage statistics
*/
async getStorageStats() {
return await this.deltaStorage.getStats();
}
}

2
src/query/index.ts Normal file
View File

@ -0,0 +1,2 @@
export { QueryEngine } from './query-engine';
export { StorageQueryEngine, JsonLogic as StorageJsonLogic } from './storage-query-engine';

301
src/query/query-engine.ts Normal file
View File

@ -0,0 +1,301 @@
import { apply } from 'json-logic-js';
import Debug from 'debug';
import { SchemaRegistry, SchemaID, ObjectSchema } from '../schema/schema';
import { Lossless, LosslessViewOne, LosslessViewMany, CollapsedDelta } from '../views/lossless';
import { DomainEntityID } from '../core/types';
import { DeltaFilter } from '../core/delta';
const debug = Debug('rz:query');
export type JsonLogic = Record<string, unknown>;
export interface QueryOptions {
maxResults?: number;
deltaFilter?: DeltaFilter;
}
export interface QueryResult {
entities: LosslessViewMany;
totalFound: number;
limited: boolean;
}
export class QueryEngine {
constructor(
private lossless: Lossless,
private schemaRegistry: SchemaRegistry
) {}
/**
* Query entities by schema type with optional JSON Logic filter
*/
async query(
schemaId: SchemaID,
filter?: JsonLogic,
options: QueryOptions = {}
): Promise<QueryResult> {
debug(`Querying schema ${schemaId} with filter:`, filter);
// 1. Find all entities that could match this schema
const candidateEntityIds = this.discoverEntitiesBySchema(schemaId);
debug(`Found ${candidateEntityIds.length} candidate entities for schema ${schemaId}`);
// 2. Compose lossless views for all candidates
const allViews = this.lossless.compose(candidateEntityIds, options.deltaFilter);
debug(`Composed ${Object.keys(allViews).length} lossless views`);
// 3. Apply JSON Logic filter if provided
let filteredViews: LosslessViewMany = allViews;
if (filter) {
filteredViews = this.applyJsonLogicFilter(allViews, filter, schemaId);
debug(`After filtering: ${Object.keys(filteredViews).length} entities match`);
}
// 4. Apply result limits if specified
const totalFound = Object.keys(filteredViews).length;
let limited = false;
if (options.maxResults && totalFound > options.maxResults) {
const entityIds = Object.keys(filteredViews).slice(0, options.maxResults);
filteredViews = {};
for (const entityId of entityIds) {
filteredViews[entityId] = allViews[entityId];
}
limited = true;
debug(`Limited results to ${options.maxResults} entities`);
}
return {
entities: filteredViews,
totalFound,
limited
};
}
/**
* Query for a single entity by ID with schema validation
*/
async queryOne(schemaId: SchemaID, entityId: DomainEntityID): Promise<LosslessViewOne | null> {
debug(`Querying single entity ${entityId} with schema ${schemaId}`);
const views = this.lossless.compose([entityId]);
const view = views[entityId];
if (!view) {
debug(`Entity ${entityId} not found`);
return null;
}
// Validate that the entity matches the schema
if (!this.entityMatchesSchema(view, schemaId)) {
debug(`Entity ${entityId} does not match schema ${schemaId}`);
return null;
}
return view;
}
/**
* Discover all entities that could potentially match a given schema
* This is a heuristic based on the schema's required properties
*/
private discoverEntitiesBySchema(schemaId: SchemaID): DomainEntityID[] {
const schema = this.schemaRegistry.get(schemaId);
if (!schema) {
debug(`Schema ${schemaId} not found in registry`);
return [];
}
// Strategy: Find entities that have deltas for the schema's required properties
const requiredProperties = schema.requiredProperties || [];
const allEntityIds = Array.from(this.lossless.domainEntities.keys());
if (requiredProperties.length === 0) {
// No required properties - return all entities
debug(`Schema ${schemaId} has no required properties, returning all entities`);
return allEntityIds;
}
// Find entities that have at least one required property
const candidateEntities: DomainEntityID[] = [];
for (const entityId of allEntityIds) {
const entity = this.lossless.domainEntities.get(entityId);
if (!entity) continue;
// Check if entity has deltas for any required property
Review

Maybe this should only include items with _all_ required properties?
const hasRequiredProperty = requiredProperties.some(propertyId =>
entity.properties.has(propertyId)
);
if (hasRequiredProperty) {
candidateEntities.push(entityId);
}
}
debug(`Found ${candidateEntities.length} entities with required properties for schema ${schemaId}`);
return candidateEntities;
}
/**
* Apply JSON Logic filter to lossless views
* This requires converting each lossless view to a queryable object
*/
private applyJsonLogicFilter(
views: LosslessViewMany,
filter: JsonLogic,
schemaId: SchemaID
): LosslessViewMany {
const schema = this.schemaRegistry.get(schemaId);
if (!schema) {
debug(`Cannot filter without schema ${schemaId}`);
return views;
}
const filteredViews: LosslessViewMany = {};
for (const [entityId, view] of Object.entries(views)) {
// Convert lossless view to queryable object using schema
const queryableObject = this.losslessViewToQueryableObject(view, schema);
try {
// Apply JSON Logic filter
const matches = apply(filter, queryableObject);
if (matches) {
filteredViews[entityId] = view;
debug(`Entity ${entityId} matches filter`);
} else {
debug(`Entity ${entityId} does not match filter`);
}
} catch (error) {
debug(`Error applying filter to entity ${entityId}:`, error);
// Skip entities that cause filter errors
}
}
return filteredViews;
}
/**
* Convert a lossless view to a queryable object based on schema
* Uses simple resolution strategies for now
*/
private losslessViewToQueryableObject(view: LosslessViewOne, schema: ObjectSchema): Record<string, unknown> {
const obj: Record<string, unknown> = {
id: view.id,
_referencedAs: view.referencedAs
};
// Convert each schema property from lossless view deltas
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
const deltas = view.propertyDeltas[propertyId] || [];
if (deltas.length === 0) {
obj[propertyId] = null;
continue;
}
// Apply simple resolution strategy based on property schema type
switch (propertySchema.type) {
case 'primitive': {
// Use last-write-wins for primitives
const lastDelta = [...deltas].sort((a, b) => b.timeCreated - a.timeCreated)[0]; // copy before sorting so the view's delta order is not mutated
const primitiveValue = this.extractPrimitiveValue(lastDelta, propertyId);
obj[propertyId] = primitiveValue;
break;
}
case 'array': {
// Collect all values as array
const arrayValues = deltas
.map(delta => this.extractPrimitiveValue(delta, propertyId))
.filter(value => value !== null);
obj[propertyId] = arrayValues;
break;
}
case 'reference': {
// For references, include the target IDs
const refValues = deltas
.map(delta => this.extractReferenceValue(delta, propertyId))
.filter(value => value !== null);
obj[propertyId] = refValues;
break;
}
default:
obj[propertyId] = deltas.length;
}
}
debug(`Converted entity ${view.id} to queryable object:`, obj);
return obj;
}
/**
* Extract primitive value from a delta for a given property
*/
private extractPrimitiveValue(delta: CollapsedDelta, _propertyId: string): unknown {
// Look for the value in collapsed pointers
// CollapsedPointer is {[key: PropertyID]: PropertyTypes}
for (const pointer of delta.pointers) {
if (pointer.value !== undefined) {
return pointer.value;
}
}
return null;
}
/**
* Extract reference value (target ID) from a delta for a given property
Review

> for a given property

It seems like a bug that `propertyId` is not referenced in this function
*/
private extractReferenceValue(delta: CollapsedDelta, _propertyId: string): string | null {
// For references, we want the value pointer that contains the reference ID
for (const pointer of delta.pointers) {
if (pointer.value !== undefined && typeof pointer.value === 'string') {
return pointer.value;
}
}
return null;
}
/**
* Check if an entity matches a schema (basic validation)
*/
private entityMatchesSchema(view: LosslessViewOne, schemaId: SchemaID): boolean {
const schema = this.schemaRegistry.get(schemaId);
if (!schema) return false;
// Check that all required properties have at least one delta
const requiredProperties = schema.requiredProperties || [];
for (const propertyId of requiredProperties) {
const deltas = view.propertyDeltas[propertyId];
if (!deltas || deltas.length === 0) {
debug(`Entity ${view.id} missing required property ${propertyId} for schema ${schemaId}`);
return false;
}
}
return true;
}
/**
* Get statistics about queryable entities
*/
getStats() {
const totalEntities = this.lossless.domainEntities.size;
const registeredSchemas = this.schemaRegistry.list().length;
return {
totalEntities,
registeredSchemas,
schemasById: this.schemaRegistry.list().reduce((acc, schema) => {
acc[schema.id] = this.discoverEntitiesBySchema(schema.id).length;
return acc;
}, {} as Record<string, number>)
};
}
}
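
Usage sketch (not part of this changeset): a minimal QueryEngine call, assuming a 'user' schema is already registered and the lossless view is populated; the schema ID and the `age` property are illustrative.

```typescript
// Sketch only: `engine` is assumed to be constructed with a populated Lossless view
// and a SchemaRegistry containing a 'user' schema.
import { QueryEngine } from './query-engine';

async function findAdults(engine: QueryEngine) {
  const result = await engine.query(
    'user',                             // SchemaID registered in the SchemaRegistry
    { '>=': [{ var: 'age' }, 18] },     // JSON Logic applied to the resolved queryable object
    { maxResults: 50 }
  );
  console.log(`${result.totalFound} matched${result.limited ? ' (results truncated)' : ''}`);
  return result.entities;               // LosslessViewMany keyed by entity ID
}
```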

337
src/query/storage-query-engine.ts Normal file
View File

@ -0,0 +1,337 @@
import { apply } from 'json-logic-js';
import Debug from 'debug';
import { SchemaRegistry, SchemaID, ObjectSchema } from '../schema';
import { DeltaQueryStorage, DeltaQuery } from '../storage/interface';
import { DomainEntityID } from '../core/types';
import { Delta, DeltaFilter } from '../core/delta';
const debug = Debug('rz:storage-query');
export type JsonLogic = Record<string, unknown>;
export interface StorageQueryOptions {
maxResults?: number;
deltaFilter?: DeltaFilter;
useIndexes?: boolean; // Whether to use storage indexes for optimization
}
export interface StorageQueryResult {
entities: StorageEntityResult[];
totalFound: number;
limited: boolean;
queryTime: number; // milliseconds
}
export interface StorageEntityResult {
entityId: DomainEntityID;
deltas: Delta[];
properties: Record<string, unknown>; // Resolved properties for filtering
}
/**
* Query engine that works directly with storage backends
* Supports both in-memory and persistent storage with optimizations
*/
export class StorageQueryEngine {
constructor(
private storage: DeltaQueryStorage,
private schemaRegistry: SchemaRegistry
) {}
/**
* Query entities by schema type with optional JSON Logic filter
* This version works directly with the storage layer for better performance
*/
async query(
schemaId: SchemaID,
filter?: JsonLogic,
options: StorageQueryOptions = {}
): Promise<StorageQueryResult> {
const startTime = Date.now();
debug(`Querying schema ${schemaId} with filter:`, filter);
const schema = this.schemaRegistry.get(schemaId);
if (!schema) {
throw new Error(`Schema ${schemaId} not found`);
}
// 1. Use storage queries to find candidate deltas efficiently
const candidateDeltas = await this.findCandidateDeltas(schema, options);
debug(`Found ${candidateDeltas.length} candidate deltas`);
// 2. Group deltas by entity
const entityGroups = this.groupDeltasByEntity(candidateDeltas, schema);
debug(`Grouped into ${entityGroups.length} entities`);
// 3. Resolve properties for filtering
const entityResults: StorageEntityResult[] = [];
for (const group of entityGroups) {
const properties = this.resolveEntityProperties(group.deltas, schema);
entityResults.push({
entityId: group.entityId,
deltas: group.deltas,
properties
});
}
// 4. Apply JSON Logic filter if provided
let filteredResults = entityResults;
if (filter) {
filteredResults = this.applyJsonLogicFilter(entityResults, filter);
debug(`After filtering: ${filteredResults.length} entities match`);
}
// 5. Apply result limits
const totalFound = filteredResults.length;
let limited = false;
if (options.maxResults && totalFound > options.maxResults) {
filteredResults = filteredResults.slice(0, options.maxResults);
limited = true;
debug(`Limited results to ${options.maxResults} entities`);
}
const queryTime = Date.now() - startTime;
debug(`Query completed in ${queryTime}ms`);
return {
entities: filteredResults,
totalFound,
limited,
queryTime
};
}
/**
* Query for a single entity by ID with schema validation
*/
async queryOne(schemaId: SchemaID, entityId: DomainEntityID): Promise<StorageEntityResult | null> {
debug(`Querying single entity ${entityId} with schema ${schemaId}`);
const schema = this.schemaRegistry.get(schemaId);
if (!schema) {
throw new Error(`Schema ${schemaId} not found`);
}
// Get all deltas for this entity
const deltas = await this.storage.getDeltasForEntity(entityId);
if (deltas.length === 0) {
return null;
}
// Resolve properties and validate against schema
const properties = this.resolveEntityProperties(deltas, schema);
// Basic schema validation - check required properties
if (!this.entityMatchesSchema(properties, schema)) {
debug(`Entity ${entityId} does not match schema ${schemaId}`);
return null;
}
return {
entityId,
deltas,
properties
};
}
/**
* Find candidate deltas based on schema requirements
*/
private async findCandidateDeltas(schema: ObjectSchema, options: StorageQueryOptions): Promise<Delta[]> {
const requiredProperties = schema.requiredProperties || [];
if (requiredProperties.length === 0) {
// No required properties - get all deltas (with optional filter)
return await this.storage.getAllDeltas(options.deltaFilter);
}
// Use storage query optimization if available
if (options.useIndexes !== false && 'queryDeltas' in this.storage) {
const deltaQuery: DeltaQuery = {
contexts: requiredProperties,
// Add other query optimizations based on schema
};
return await this.storage.queryDeltas(deltaQuery);
}
// Fallback: get all deltas and filter
return await this.storage.getAllDeltas(options.deltaFilter);
}
/**
* Group deltas by the entities they reference
*/
private groupDeltasByEntity(deltas: Delta[], schema: ObjectSchema): { entityId: DomainEntityID; deltas: Delta[] }[] {
const entityMap = new Map<DomainEntityID, Delta[]>();
for (const delta of deltas) {
// Find entity references in this delta
const entityIds = this.extractEntityIds(delta, schema);
for (const entityId of entityIds) {
if (!entityMap.has(entityId)) {
entityMap.set(entityId, []);
}
entityMap.get(entityId)!.push(delta);
}
}
return Array.from(entityMap.entries()).map(([entityId, deltas]) => ({
entityId,
deltas
}));
}
/**
* Extract entity IDs from a delta based on schema context
*/
private extractEntityIds(delta: Delta, schema: ObjectSchema): DomainEntityID[] {
const entityIds: DomainEntityID[] = [];
for (const pointer of delta.pointers) {
// Check if this pointer references an entity with a property defined in the schema
if (typeof pointer.target === 'string' &&
pointer.targetContext &&
schema.properties[pointer.targetContext]) {
entityIds.push(pointer.target);
}
}
return [...new Set(entityIds)]; // Remove duplicates
}
/**
* Resolve entity properties from deltas for query filtering
*/
private resolveEntityProperties(deltas: Delta[], schema: ObjectSchema): Record<string, unknown> {
const properties: Record<string, unknown> = {};
// Group deltas by property context
const propertyDeltas = new Map<string, Delta[]>();
for (const delta of deltas) {
for (const pointer of delta.pointers) {
if (pointer.targetContext && schema.properties[pointer.targetContext]) {
if (!propertyDeltas.has(pointer.targetContext)) {
propertyDeltas.set(pointer.targetContext, []);
}
propertyDeltas.get(pointer.targetContext)!.push(delta);
}
}
}
// Resolve each property using simple last-write-wins strategy
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
const propDeltas = propertyDeltas.get(propertyId) || [];
if (propDeltas.length === 0) {
properties[propertyId] = null;
continue;
}
// Apply simple resolution strategy based on property schema type
switch (propertySchema.type) {
case 'primitive': {
// Use last-write-wins for primitives
const lastDelta = propDeltas.sort((a, b) => b.timeCreated - a.timeCreated)[0];
properties[propertyId] = this.extractPrimitiveValue(lastDelta, propertyId);
break;
}
case 'array': {
// Collect all values as array
const arrayValues = propDeltas
.map(delta => this.extractPrimitiveValue(delta, propertyId))
.filter(value => value !== null);
properties[propertyId] = arrayValues;
break;
}
case 'reference': {
// For references, include the target IDs
const refValues = propDeltas
.map(delta => this.extractReferenceValue(delta, propertyId))
.filter(value => value !== null);
properties[propertyId] = refValues;
break;
}
default:
properties[propertyId] = propDeltas.length;
}
}
return properties;
}
/**
* Extract primitive value from a delta for a given property
*/
private extractPrimitiveValue(delta: Delta, _propertyId: string): unknown {
for (const pointer of delta.pointers) {
if (pointer.localContext === 'value') {
return pointer.target;
}
}
return null;
}
/**
* Extract reference value (target ID) from a delta for a given property
*/
private extractReferenceValue(delta: Delta, _propertyId: string): string | null {
for (const pointer of delta.pointers) {
if (pointer.localContext === 'value' && typeof pointer.target === 'string') {
return pointer.target;
}
}
return null;
}
/**
* Apply JSON Logic filter to entity results
*/
private applyJsonLogicFilter(entityResults: StorageEntityResult[], filter: JsonLogic): StorageEntityResult[] {
return entityResults.filter(entityResult => {
try {
const matches = apply(filter, entityResult.properties);
return matches;
} catch (error) {
debug(`Error applying filter to entity ${entityResult.entityId}:`, error);
return false;
}
});
}
/**
* Check if an entity matches a schema (basic validation)
*/
private entityMatchesSchema(properties: Record<string, unknown>, schema: ObjectSchema): boolean {
const requiredProperties = schema.requiredProperties || [];
for (const propertyId of requiredProperties) {
if (properties[propertyId] === null || properties[propertyId] === undefined) {
return false;
}
}
return true;
}
/**
* Get query engine statistics
*/
async getStats() {
const storageStats = await this.storage.getStats();
const registeredSchemas = this.schemaRegistry.list().length;
return {
storage: storageStats,
registeredSchemas,
storageType: this.storage.constructor.name
};
}
}
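
Usage sketch (not part of this changeset): wiring StorageQueryEngine to a LevelDB backend via the factory. Import paths and the 'user'/'active' names are illustrative, and it assumes deltas were previously stored in that LevelDB directory.

```typescript
// Sketch only: import paths mirror the new modules in this changeset.
import { StorageFactory } from '../storage/factory';
import { DefaultSchemaRegistry, CommonSchemas } from '../schema';
import { StorageQueryEngine } from './storage-query-engine';

async function queryActiveUsers() {
  const storage = StorageFactory.create({ type: 'leveldb', path: './data/deltas' });
  const registry = new DefaultSchemaRegistry();
  registry.register(CommonSchemas.User());   // schema id: 'user'

  const engine = new StorageQueryEngine(storage, registry);
  const result = await engine.query('user', { '==': [{ var: 'active' }, true] });
  console.log(`Found ${result.totalFound} entities in ${result.queryTime}ms`);

  await storage.close();
  return result.entities;
}
```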

2
src/schema/index.ts Normal file
View File

@ -0,0 +1,2 @@
export * from './schema';
export * from './schema-registry';

805
src/schema/schema-registry.ts Normal file
View File

@ -0,0 +1,805 @@
import Debug from 'debug';
import {
SchemaRegistry,
ObjectSchema,
SchemaID,
SchemaValidationResult,
SchemaValidationError,
PropertySchema,
PrimitiveSchema,
ReferenceSchema,
ArraySchema,
SchemaAppliedView,
SchemaAppliedViewWithNesting,
SchemaApplicationOptions,
ResolutionContext
} from '../schema/schema';
import { LosslessViewOne, Lossless } from '../views/lossless';
import { DomainEntityID, PropertyID, PropertyTypes } from '../core/types';
import { CollapsedDelta } from '../views/lossless';
const debug = Debug('rz:schema-registry');
export class DefaultSchemaRegistry implements SchemaRegistry {
schemas = new Map<SchemaID, ObjectSchema>();
register(schema: ObjectSchema): void {
this.validateSchemaStructure(schema);
this.schemas.set(schema.id, schema);
debug(`Registered schema: ${schema.id} (${schema.name})`);
}
get(id: SchemaID): ObjectSchema | undefined {
return this.schemas.get(id);
}
list(): ObjectSchema[] {
return Array.from(this.schemas.values());
}
private validateSchemaStructure(schema: ObjectSchema): void {
if (!schema.id || typeof schema.id !== 'string') {
throw new Error('Schema must have a valid string id');
}
if (!schema.name || typeof schema.name !== 'string') {
throw new Error('Schema must have a valid string name');
}
if (!schema.properties || typeof schema.properties !== 'object') {
throw new Error('Schema must have properties object');
}
// Validate property schemas
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
this.validatePropertySchema(propertySchema, `${schema.id}.${propertyId}`);
}
// Validate required properties exist
if (schema.requiredProperties) {
for (const required of schema.requiredProperties) {
if (!(required in schema.properties)) {
throw new Error(`Required property '${required}' not found in schema '${schema.id}'`);
}
}
}
}
private validatePropertySchema(schema: PropertySchema, path: string): void {
switch (schema.type) {
case 'primitive':
this.validatePrimitiveSchema(schema, path);
break;
case 'reference':
this.validateReferenceSchema(schema, path);
break;
case 'array':
this.validateArraySchema(schema, path);
break;
default:
throw new Error(`Unknown schema type at ${path}`);
}
}
private validatePrimitiveSchema(schema: PrimitiveSchema, path: string): void {
const validTypes = ['string', 'number', 'boolean', 'null'];
if (!validTypes.includes(schema.primitiveType)) {
throw new Error(`Invalid primitive type '${schema.primitiveType}' at ${path}`);
}
}
private validateReferenceSchema(schema: ReferenceSchema, path: string): void {
if (!schema.targetSchema || typeof schema.targetSchema !== 'string') {
throw new Error(`Reference schema must have valid targetSchema at ${path}`);
}
if (schema.maxDepth !== undefined && schema.maxDepth < 0) {
throw new Error(`Reference maxDepth must be non-negative at ${path}`);
}
}
private validateArraySchema(schema: ArraySchema, path: string): void {
this.validatePropertySchema(schema.itemSchema, `${path}[]`);
if (schema.maxItems !== undefined && schema.maxItems < 0) {
throw new Error(`Array maxItems must be non-negative at ${path}`);
}
}
validate(entityId: DomainEntityID, schemaId: SchemaID, view: LosslessViewOne): SchemaValidationResult {
const schema = this.get(schemaId);
if (!schema) {
return {
valid: false,
errors: [{
property: '',
message: `Schema '${schemaId}' not found`
}],
warnings: []
};
}
const errors: SchemaValidationError[] = [];
const warnings: SchemaValidationError[] = [];
// Check required properties
if (schema.requiredProperties) {
for (const required of schema.requiredProperties) {
if (!(required in view.propertyDeltas) || view.propertyDeltas[required].length === 0) {
errors.push({
property: required,
message: `Required property '${required}' is missing or has no deltas`
});
}
}
}
// Validate each property in the view
for (const [propertyId, deltas] of Object.entries(view.propertyDeltas)) {
const propertySchema = schema.properties[propertyId];
if (!propertySchema) {
if (schema.additionalProperties === false) {
warnings.push({
property: propertyId,
message: `Property '${propertyId}' not defined in schema and additionalProperties is false`
});
}
continue;
}
// Validate each delta for this property
for (const delta of deltas) {
const validationResult = this.validateDeltaAgainstPropertySchema(
delta,
propertySchema,
propertyId
);
errors.push(...validationResult.errors);
warnings.push(...validationResult.warnings);
}
}
// Validate properties defined in schema but missing from view
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
if (!(propertyId in view.propertyDeltas)) {
if (propertySchema.required) {
errors.push({
property: propertyId,
message: `Required property '${propertyId}' is missing from view`
});
}
}
}
return {
valid: errors.length === 0,
errors,
warnings
};
}
private validateDeltaAgainstPropertySchema(
delta: CollapsedDelta,
schema: PropertySchema,
propertyId: PropertyID
): SchemaValidationResult {
const errors: SchemaValidationError[] = [];
const warnings: SchemaValidationError[] = [];
// Extract the value from the delta
const valuePointer = delta.pointers.find(p => p[propertyId] !== undefined);
if (!valuePointer) {
errors.push({
property: propertyId,
message: `Delta does not contain expected property '${propertyId}'`
});
return { valid: false, errors, warnings };
}
const value = valuePointer[propertyId];
switch (schema.type) {
case 'primitive':
this.validatePrimitiveValue(value, schema, propertyId, errors);
break;
case 'reference':
this.validateReferenceValue(value, schema, propertyId, errors, warnings);
break;
case 'array':
// Arrays are complex - for now just warn that we don't fully validate them
warnings.push({
property: propertyId,
message: `Array validation not fully implemented for property '${propertyId}'`
});
break;
}
return {
valid: errors.length === 0,
errors,
warnings
};
}
private validatePrimitiveValue(
value: unknown,
schema: PrimitiveSchema,
propertyId: PropertyID,
errors: SchemaValidationError[]
): void {
let valid = false;
switch (schema.primitiveType) {
case 'string':
valid = typeof value === 'string';
break;
case 'number':
valid = typeof value === 'number';
break;
case 'boolean':
valid = typeof value === 'boolean';
break;
case 'null':
valid = value === null;
break;
}
if (!valid) {
errors.push({
property: propertyId,
message: `Expected ${schema.primitiveType} but got ${typeof value}`,
expectedType: schema.primitiveType,
actualValue: value
});
}
}
private validateReferenceValue(
value: unknown,
schema: ReferenceSchema,
propertyId: PropertyID,
errors: SchemaValidationError[],
warnings: SchemaValidationError[]
): void {
if (typeof value !== 'string') {
errors.push({
property: propertyId,
message: `Reference value must be a string (entity ID), got ${typeof value}`,
expectedType: 'string (entity ID)',
actualValue: value
});
return;
}
// Check if target schema exists
const targetSchema = this.get(schema.targetSchema);
if (!targetSchema) {
warnings.push({
property: propertyId,
message: `Target schema '${schema.targetSchema}' not found for reference`
});
}
}
applySchema(
view: LosslessViewOne,
schemaId: SchemaID,
options: SchemaApplicationOptions = {}
): SchemaAppliedView {
const schema = this.get(schemaId);
if (!schema) {
throw new Error(`Schema '${schemaId}' not found`);
}
const { includeMetadata = true, strictValidation = false, maxDepth: _maxDepth = 3 } = options;
const appliedView: SchemaAppliedView = {
id: view.id,
schemaId,
properties: {}
};
// Apply schema to each property
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
const deltas = view.propertyDeltas[propertyId] || [];
const validationResult = this.validate(view.id, schemaId, view);
appliedView.properties[propertyId] = {
deltas,
schema: propertySchema,
validationResult
};
// If strict validation is enabled and there are errors, throw
if (strictValidation && !validationResult.valid) {
throw new Error(`Schema validation failed for property '${propertyId}': ${validationResult.errors[0]?.message}`);
}
}
// Add metadata if requested
if (includeMetadata) {
appliedView.metadata = {
appliedAt: Date.now(),
depth: 1,
truncated: false
};
}
return appliedView;
}
/**
* Apply schema with nested object resolution
* Resolves references to other entities according to schema specifications
*/
applySchemaWithNesting(
view: LosslessViewOne,
schemaId: SchemaID,
losslessView: Lossless,
options: SchemaApplicationOptions = {}
): SchemaAppliedViewWithNesting {
const { maxDepth = 3, includeMetadata = true, strictValidation = false } = options;
const resolutionContext = new ResolutionContext(maxDepth);
return this.resolveNestedView(
view,
schemaId,
losslessView,
resolutionContext,
{ includeMetadata, strictValidation }
);
}
private resolveNestedView(
view: LosslessViewOne,
schemaId: SchemaID,
losslessView: Lossless,
context: ResolutionContext,
options: { includeMetadata: boolean; strictValidation: boolean }
): SchemaAppliedViewWithNesting {
const schema = this.get(schemaId);
if (!schema) {
throw new Error(`Schema '${schemaId}' not found`);
}
// Check for circular reference
if (context.hasVisited(view.id, schemaId)) {
return this.createTruncatedView(view.id, schemaId, context.currentDepth, true);
}
// Check depth limit
if (context.currentDepth >= context.maxDepth) {
return this.createTruncatedView(view.id, schemaId, context.currentDepth, true);
}
// Mark this entity/schema combination as visited
context.visit(view.id, schemaId);
const appliedView: SchemaAppliedViewWithNesting = {
id: view.id,
schemaId,
properties: {},
nestedObjects: {}
};
// Validate the view once
const overallValidationResult = this.validate(view.id, schemaId, view);
// Process each property
for (const [propertyId, propertySchema] of Object.entries(schema.properties)) {
const deltas = view.propertyDeltas[propertyId] || [];
appliedView.properties[propertyId] = {
deltas,
schema: propertySchema,
validationResult: overallValidationResult
};
// Handle reference resolution
if (propertySchema.type === 'reference') {
const referenceSchema = propertySchema as ReferenceSchema;
const nestedViews = this.resolveReferenceProperty(
deltas,
referenceSchema,
losslessView,
context.withDepth(context.currentDepth + 1),
options,
view.id
);
if (nestedViews.length > 0) {
appliedView.nestedObjects[propertyId] = nestedViews;
}
} else if (propertySchema.type === 'array' && propertySchema.itemSchema?.type === 'reference') {
const arraySchema = propertySchema as ArraySchema;
const referenceSchema = arraySchema.itemSchema as ReferenceSchema;
const nestedViews = this.resolveReferenceProperty(
deltas,
referenceSchema,
losslessView,
context.withDepth(context.currentDepth + 1),
options,
view.id
);
if (nestedViews.length > 0) {
appliedView.nestedObjects[propertyId] = nestedViews;
}
}
// Validation error handling
if (options.strictValidation && !overallValidationResult.valid) {
throw new Error(`Schema validation failed for property '${propertyId}': ${overallValidationResult.errors[0]?.message}`);
}
}
// Add metadata
if (options.includeMetadata) {
appliedView.metadata = {
appliedAt: Date.now(),
depth: context.currentDepth,
truncated: context.currentDepth >= context.maxDepth
};
}
// Mark as unvisited when leaving this path
context.unvisit(view.id, schemaId);
return appliedView;
}
private resolveReferenceProperty(
deltas: CollapsedDelta[],
referenceSchema: ReferenceSchema,
losslessView: Lossless,
context: ResolutionContext,
options: { includeMetadata: boolean; strictValidation: boolean },
parentEntityId: string
): SchemaAppliedViewWithNesting[] {
const resolvedViews: SchemaAppliedViewWithNesting[] = [];
const referenceDepthLimit = referenceSchema.maxDepth || context.maxDepth;
// Check if we're at the reference's specific depth limit
if (context.currentDepth >= referenceDepthLimit) {
return [];
}
// Create composite objects from deltas - one per delta
for (const delta of deltas) {
try {
const compositeObject = this.createCompositeObjectFromDelta(
delta,
parentEntityId,
referenceSchema.targetSchema,
losslessView,
context,
options
);
if (compositeObject) {
resolvedViews.push(compositeObject);
} else {
// Fall back to original logic for single entity references
const referenceIds = this.extractReferenceIdsFromDelta(delta, parentEntityId);
for (const referenceId of referenceIds) {
try {
// Get the referenced entity's lossless view
const referencedViews = losslessView.view([referenceId]);
const referencedView = referencedViews[referenceId];
if (referencedView) {
// Recursively resolve the referenced entity with its target schema
const nestedView = this.resolveNestedView(
referencedView,
referenceSchema.targetSchema,
losslessView,
context,
options
);
resolvedViews.push(nestedView);
}
} catch (error) {
// Handle resolution errors gracefully
console.warn(`Failed to resolve reference ${referenceId}:`, error);
}
}
}
} catch (error) {
// Handle resolution errors gracefully
console.warn(`Failed to resolve composite object from delta ${delta.id}:`, error);
}
}
return resolvedViews;
}
private createCompositeObjectFromDelta(
delta: CollapsedDelta,
parentEntityId: string,
targetSchema: SchemaID,
losslessView: Lossless,
context: ResolutionContext,
options: { includeMetadata: boolean; strictValidation: boolean }
): SchemaAppliedViewWithNesting | null {
// Group pointers by localContext, excluding the parent pointer
const pointersByContext: { [localContext: string]: PropertyTypes[] } = {};
let entityReferenceCount = 0;
let scalarCount = 0;
for (const pointer of delta.pointers) {
for (const [localContext, target] of Object.entries(pointer)) {
// Skip the pointer that references the parent entity (the "up" pointer)
if (typeof target === 'string' && target === parentEntityId) {
continue;
}
if (!pointersByContext[localContext]) {
pointersByContext[localContext] = [];
}
pointersByContext[localContext].push(target);
// Count entity references vs scalars
if (typeof target === 'string') {
const referencedViews = losslessView.view([target]);
if (referencedViews[target]) {
entityReferenceCount++;
} else {
scalarCount++;
}
} else {
scalarCount++;
}
}
}
// If no non-parent pointers found, return null
if (Object.keys(pointersByContext).length === 0) {
return null;
}
// Only create composite objects for deltas with multiple entity references or mixed entity/scalar
// Single entity reference should use the original behavior
if (entityReferenceCount === 1 && scalarCount === 0) {
return null; // Let the original logic handle single entity references
}
// Create the composite object
const nestedObjects: { [propertyId: string]: SchemaAppliedViewWithNesting[] } = {};
const scalarProperties: { [key: string]: PropertyTypes | PropertyTypes[] } = {};
for (const [localContext, targets] of Object.entries(pointersByContext)) {
if (targets.length === 1) {
const target = targets[0];
if (typeof target === 'string') {
// Try to resolve as entity reference
try {
const referencedViews = losslessView.view([target]);
const referencedView = referencedViews[target];
if (referencedView) {
// Recursively resolve the referenced entity
const nestedView = this.resolveNestedView(
referencedView,
targetSchema,
losslessView,
context,
options
);
nestedObjects[localContext] = [nestedView];
} else {
// Not a valid entity reference, treat as scalar
scalarProperties[localContext] = target;
}
} catch (_error) {
// Failed to resolve as entity, treat as scalar
scalarProperties[localContext] = target;
}
} else {
// Scalar value
scalarProperties[localContext] = target;
}
} else {
// Multiple values for same localContext - create array
const resolvedArray: (PropertyTypes | SchemaAppliedViewWithNesting)[] = [];
for (const target of targets) {
if (typeof target === 'string') {
// Try to resolve as entity reference
try {
const referencedViews = losslessView.view([target]);
const referencedView = referencedViews[target];
if (referencedView) {
const nestedView = this.resolveNestedView(
referencedView,
targetSchema,
losslessView,
context,
options
);
resolvedArray.push(nestedView);
} else {
// Not a valid entity reference, treat as scalar
resolvedArray.push(target);
}
} catch (_error) {
// Failed to resolve as entity, treat as scalar
resolvedArray.push(target);
}
} else {
// Scalar value
resolvedArray.push(target);
}
}
// Separate entities from scalars in the array
const entities: SchemaAppliedViewWithNesting[] = [];
const scalars: PropertyTypes[] = [];
for (const item of resolvedArray) {
if (typeof item === 'object' && item !== null && 'schemaId' in item) {
entities.push(item as SchemaAppliedViewWithNesting);
} else {
scalars.push(item as PropertyTypes);
}
}
if (entities.length > 0) {
nestedObjects[localContext] = entities;
}
if (scalars.length > 0) {
scalarProperties[localContext] = scalars.length === 1 ? scalars[0] : scalars;
}
}
}
// Create a synthetic composite object
const compositeObject = {
id: `composite-${delta.id}`, // Synthetic ID for the composite object
schemaId: targetSchema,
properties: scalarProperties, // Custom field for scalar values
nestedObjects,
metadata: {
appliedAt: Date.now(),
depth: context.currentDepth,
truncated: false
}
};
return compositeObject as unknown as SchemaAppliedViewWithNesting;
}
private extractReferenceIdsFromDelta(delta: CollapsedDelta, parentEntityId: string): string[] {
const referenceIds = new Set<string>();
// For each pointer in the delta, collect all values that aren't the parent entity
for (const pointer of delta.pointers) {
for (const [_key, value] of Object.entries(pointer)) {
if (typeof value === 'string' && value !== parentEntityId) {
// This is a potential reference - any string value that's not the parent
referenceIds.add(value);
} else if (typeof value === 'object' && value !== null) {
// For object values, collect the entity IDs (keys) that aren't the parent
for (const entityId of Object.keys(value)) {
if (typeof entityId === 'string' && entityId !== parentEntityId) {
referenceIds.add(entityId);
}
}
}
}
}
return Array.from(referenceIds);
}
private extractReferenceIds(deltas: CollapsedDelta[], parentEntityId: string): string[] {
const referenceIds = new Set<string>();
for (const delta of deltas) {
// For each pointer in the delta, collect all values that aren't the parent entity
for (const pointer of delta.pointers) {
for (const [_key, value] of Object.entries(pointer)) {
if (typeof value === 'string' && value !== parentEntityId) {
// This is a potential reference - any string value that's not the parent
referenceIds.add(value);
} else if (typeof value === 'object' && value !== null) {
// For object values, collect the entity IDs (keys) that aren't the parent
for (const entityId of Object.keys(value)) {
if (typeof entityId === 'string' && entityId !== parentEntityId) {
referenceIds.add(entityId);
}
}
}
}
}
}
return Array.from(referenceIds);
}
private createTruncatedView(
entityId: string,
schemaId: SchemaID,
depth: number,
truncated: boolean
): SchemaAppliedViewWithNesting {
return {
id: entityId,
schemaId,
properties: {},
nestedObjects: {},
metadata: {
appliedAt: Date.now(),
depth,
truncated
}
};
}
// Helper method to build the schema dependency graph (used by circular dependency detection)
getDependencyGraph(): Map<SchemaID, Set<SchemaID>> {
const dependencies = new Map<SchemaID, Set<SchemaID>>();
for (const schema of this.schemas.values()) {
const deps = new Set<SchemaID>();
this.collectSchemaDependencies(schema, deps);
dependencies.set(schema.id, deps);
}
return dependencies;
}
private collectSchemaDependencies(schema: ObjectSchema, deps: Set<SchemaID>): void {
for (const propertySchema of Object.values(schema.properties)) {
this.collectPropertySchemaDependencies(propertySchema, deps);
}
}
private collectPropertySchemaDependencies(schema: PropertySchema, deps: Set<SchemaID>): void {
switch (schema.type) {
case 'reference':
deps.add(schema.targetSchema);
break;
case 'array':
this.collectPropertySchemaDependencies(schema.itemSchema, deps);
break;
}
}
// Check for circular dependencies
hasCircularDependencies(): boolean {
const dependencies = this.getDependencyGraph();
const visited = new Set<SchemaID>();
const recursionStack = new Set<SchemaID>();
for (const schemaId of dependencies.keys()) {
if (this.hasCircularDependencyDFS(schemaId, dependencies, visited, recursionStack)) {
return true;
}
}
return false;
}
private hasCircularDependencyDFS(
schemaId: SchemaID,
dependencies: Map<SchemaID, Set<SchemaID>>,
visited: Set<SchemaID>,
recursionStack: Set<SchemaID>
): boolean {
if (recursionStack.has(schemaId)) {
return true; // Circular dependency found
}
if (visited.has(schemaId)) {
return false; // Already processed
}
visited.add(schemaId);
recursionStack.add(schemaId);
const deps = dependencies.get(schemaId) || new Set();
for (const dep of deps) {
if (this.hasCircularDependencyDFS(dep, dependencies, visited, recursionStack)) {
return true;
}
}
recursionStack.delete(schemaId);
return false;
}
}
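
Usage sketch (not part of this changeset): registering the built-in CommonSchemas and running the cycle check; the './schema-registry' import path is an assumption.

```typescript
// Sketch only: module paths are illustrative.
import { DefaultSchemaRegistry } from './schema-registry';
import { CommonSchemas } from './schema';

const registry = new DefaultSchemaRegistry();
registry.register(CommonSchemas.UserSummary());  // referenced by the other two
registry.register(CommonSchemas.User());         // 'friends' -> array of 'user-summary' refs
registry.register(CommonSchemas.Document());     // 'author' -> required 'user-summary' ref

// getDependencyGraph() drives cycle detection; these three schemas form no cycle
console.log(registry.hasCircularDependencies()); // false
```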

291
src/schema/schema.ts Normal file
View File

@ -0,0 +1,291 @@
import { DomainEntityID, PropertyID, PropertyTypes } from "../core/types";
import { LosslessViewOne } from "../views/lossless";
import { CollapsedDelta } from "../views/lossless";
// Base schema types
export type SchemaID = string;
// Primitive schema types - these terminate the recursion
export type PrimitiveSchemaType = 'string' | 'number' | 'boolean' | 'null';
export interface PrimitiveSchema {
type: 'primitive';
primitiveType: PrimitiveSchemaType;
required?: boolean;
default?: PropertyTypes;
}
// Reference schema for linking to other objects
export interface ReferenceSchema {
type: 'reference';
targetSchema: SchemaID; // Reference to another schema by ID
required?: boolean;
maxDepth?: number; // Prevent infinite recursion
}
// Array schema for collections of values
export interface ArraySchema {
type: 'array';
itemSchema: PropertySchema;
required?: boolean;
maxItems?: number;
}
// Union type for all property schema types
export type PropertySchema = PrimitiveSchema | ReferenceSchema | ArraySchema;
// Object schema defines the structure of an entity
export interface ObjectSchema {
id: SchemaID;
name: string;
description?: string;
properties: {
[propertyId: PropertyID]: PropertySchema;
};
requiredProperties?: PropertyID[];
additionalProperties?: boolean; // Allow properties not in schema
}
// Schema registry manages all schemas
export interface SchemaRegistry {
schemas: Map<SchemaID, ObjectSchema>;
register(schema: ObjectSchema): void;
get(id: SchemaID): ObjectSchema | undefined;
list(): ObjectSchema[];
validate(entityId: DomainEntityID, schemaId: SchemaID, view: LosslessViewOne): SchemaValidationResult;
}
// Validation result types
export interface SchemaValidationError {
property: PropertyID;
message: string;
expectedType?: string;
actualValue?: unknown;
}
export interface SchemaValidationResult {
valid: boolean;
errors: SchemaValidationError[];
warnings: SchemaValidationError[];
}
// Schema application options
export interface SchemaApplicationOptions {
maxDepth?: number;
includeMetadata?: boolean;
strictValidation?: boolean;
}
// Applied schema result - a lossless view filtered through a schema
export interface SchemaAppliedView {
id: DomainEntityID;
schemaId: SchemaID;
properties: {
[propertyId: PropertyID]: {
deltas: CollapsedDelta[];
schema: PropertySchema;
validationResult: SchemaValidationResult;
};
};
metadata?: {
appliedAt: number;
depth: number;
truncated: boolean;
};
}
// Extended schema applied view with nested object resolution
export interface SchemaAppliedViewWithNesting extends SchemaAppliedView {
nestedObjects: {
[propertyId: PropertyID]: SchemaAppliedViewWithNesting[];
};
}
// Schema-based collection interface
export interface TypedCollection<T> {
schema: ObjectSchema;
validate(entity: T): SchemaValidationResult;
apply(view: LosslessViewOne): SchemaAppliedView;
}
// Built-in schema helpers
export const PrimitiveSchemas = {
string: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'string' }),
number: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'number' }),
boolean: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'boolean' }),
null: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'null' }),
requiredString: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'string', required: true }),
requiredNumber: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'number', required: true }),
requiredBoolean: (): PrimitiveSchema => ({ type: 'primitive', primitiveType: 'boolean', required: true }),
} as const;
export const ReferenceSchemas = {
to: (targetSchema: SchemaID, maxDepth = 3): ReferenceSchema => ({
type: 'reference',
targetSchema,
maxDepth
}),
required: (targetSchema: SchemaID, maxDepth = 3): ReferenceSchema => ({
type: 'reference',
targetSchema,
maxDepth,
required: true
})
} as const;
export const ArraySchemas = {
of: (itemSchema: PropertySchema, maxItems?: number): ArraySchema => ({
type: 'array',
itemSchema,
maxItems
}),
required: (itemSchema: PropertySchema, maxItems?: number): ArraySchema => ({
type: 'array',
itemSchema,
maxItems,
required: true
})
} as const;
// Schema builder for fluent API
export class SchemaBuilder {
private schema: Partial<ObjectSchema> = {};
static create(id: SchemaID): SchemaBuilder {
const builder = new SchemaBuilder();
builder.schema.id = id;
builder.schema.properties = {};
return builder;
}
name(name: string): SchemaBuilder {
this.schema.name = name;
return this;
}
description(description: string): SchemaBuilder {
this.schema.description = description;
return this;
}
property(propertyId: PropertyID, schema: PropertySchema): SchemaBuilder {
if (!this.schema.properties) this.schema.properties = {};
this.schema.properties[propertyId] = schema;
return this;
}
required(...propertyIds: PropertyID[]): SchemaBuilder {
this.schema.requiredProperties = [
...(this.schema.requiredProperties || []),
...propertyIds
];
return this;
}
additionalProperties(allowed = true): SchemaBuilder {
this.schema.additionalProperties = allowed;
return this;
}
build(): ObjectSchema {
if (!this.schema.id || !this.schema.name) {
throw new Error('Schema must have id and name');
}
return this.schema as ObjectSchema;
}
}
// Common schema patterns
export const CommonSchemas = {
Review

It looks like `CommonSchemas` is only used in `__tests__` and should probably be moved there
// User schema with friends references
User: () => SchemaBuilder
.create('user')
.name('User')
.description('A user entity with profile information')
.property('name', PrimitiveSchemas.requiredString())
.property('email', PrimitiveSchemas.string())
.property('age', PrimitiveSchemas.number())
.property('active', PrimitiveSchemas.boolean())
.property('friends', ArraySchemas.of(ReferenceSchemas.to('user-summary', 2)))
.required('name')
.build(),
// User summary schema for references to prevent infinite recursion
UserSummary: () => SchemaBuilder
.create('user-summary')
.name('User Summary')
.description('Abbreviated user information for references')
.property('name', PrimitiveSchemas.requiredString())
.property('email', PrimitiveSchemas.string())
.required('name')
.additionalProperties(false)
.build(),
// Document schema
Document: () => SchemaBuilder
.create('document')
.name('Document')
.description('A document with metadata')
.property('title', PrimitiveSchemas.requiredString())
.property('content', PrimitiveSchemas.string())
.property('author', ReferenceSchemas.required('user-summary'))
.property('tags', ArraySchemas.of(PrimitiveSchemas.string()))
.property('created', PrimitiveSchemas.requiredNumber())
.property('published', PrimitiveSchemas.boolean())
.required('title', 'author', 'created')
.build()
} as const;
/**
* Context for tracking resolution state during nested object resolution
* Prevents circular references and manages depth tracking
*/
export class ResolutionContext {
private visited: Set<string> = new Set();
constructor(
public readonly maxDepth: number,
public readonly currentDepth: number = 0
) {}
/**
* Create a new context with incremented depth
*/
withDepth(depth: number): ResolutionContext {
return new ResolutionContext(this.maxDepth, depth);
}
/**
* Check if entity/schema combination has been visited
*/
hasVisited(entityId: string, schemaId: SchemaID): boolean {
const key = `${entityId}:${schemaId}`;
return this.visited.has(key);
}
/**
* Mark entity/schema combination as visited
*/
visit(entityId: string, schemaId: SchemaID): void {
const key = `${entityId}:${schemaId}`;
this.visited.add(key);
}
/**
* Remove entity/schema combination from visited set
*/
unvisit(entityId: string, schemaId: SchemaID): void {
const key = `${entityId}:${schemaId}`;
this.visited.delete(key);
}
/**
* Check if we're at maximum depth
*/
isAtMaxDepth(): boolean {
return this.currentDepth >= this.maxDepth;
}
}
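
Usage sketch (not part of this changeset): a hypothetical 'blog-post' schema assembled with the fluent builder and the helper factories defined above.

```typescript
// Sketch only: 'blog-post' and its properties are illustrative names.
import { SchemaBuilder, PrimitiveSchemas, ReferenceSchemas, ArraySchemas } from './schema';

const blogPostSchema = SchemaBuilder
  .create('blog-post')
  .name('Blog Post')
  .description('A post with an author reference and free-form tags')
  .property('title', PrimitiveSchemas.requiredString())
  .property('body', PrimitiveSchemas.string())
  .property('author', ReferenceSchemas.required('user-summary'))
  .property('tags', ArraySchemas.of(PrimitiveSchemas.string()))
  .required('title', 'author')
  .build();
```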

87
src/storage/factory.ts Normal file
View File

@ -0,0 +1,87 @@
import { DeltaStorage, DeltaQueryStorage, StorageConfig } from './interface';
import { MemoryDeltaStorage } from './memory';
import { LevelDBDeltaStorage } from './leveldb';
/**
* Factory for creating delta storage instances based on configuration
*/
export class StorageFactory {
/**
* Create a storage instance based on configuration
*/
static create(config: StorageConfig): DeltaQueryStorage {
switch (config.type) {
case 'memory':
return new MemoryDeltaStorage();
case 'leveldb': {
const dbPath = config.path || './data/deltas';
return new LevelDBDeltaStorage(dbPath);
}
case 'sqlite':
// TODO: Implement SQLite storage
throw new Error('SQLite storage not yet implemented');
case 'postgres':
// TODO: Implement PostgreSQL storage
throw new Error('PostgreSQL storage not yet implemented');
default:
throw new Error(`Unknown storage type: ${config.type}`);
}
}
/**
* Create a memory storage instance (convenience method)
*/
static createMemory(): DeltaQueryStorage {
return new MemoryDeltaStorage();
}
/**
* Create a LevelDB storage instance (convenience method)
*/
static createLevelDB(path: string = './data/deltas'): DeltaQueryStorage {
return new LevelDBDeltaStorage(path);
}
/**
* Migrate data from one storage backend to another
*/
static async migrate(
source: DeltaStorage,
target: DeltaStorage,
options: { batchSize?: number } = {}
): Promise<void> {
const batchSize = options.batchSize || 1000;
console.log('Starting storage migration...');
const allDeltas = await source.getAllDeltas();
console.log(`Found ${allDeltas.length} deltas to migrate`);
// Migrate in batches to avoid memory issues
for (let i = 0; i < allDeltas.length; i += batchSize) {
const batch = allDeltas.slice(i, i + batchSize);
for (const delta of batch) {
await target.storeDelta(delta);
}
console.log(`Migrated ${Math.min(i + batchSize, allDeltas.length)} / ${allDeltas.length} deltas`);
}
console.log('Migration completed successfully');
// Verify migration
const sourceStats = await source.getStats();
const targetStats = await target.getStats();
if (sourceStats.totalDeltas !== targetStats.totalDeltas) {
throw new Error(`Migration verification failed: source has ${sourceStats.totalDeltas} deltas, target has ${targetStats.totalDeltas}`);
}
console.log(`Migration verified: ${targetStats.totalDeltas} deltas migrated successfully`);
}
}
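
Usage sketch (not part of this changeset): a one-off migration from the in-memory backend to LevelDB using the factory helpers; the path and batch size are illustrative.

```typescript
// Sketch only: both backends implement DeltaStorage, so migrate() accepts them directly.
import { StorageFactory } from './factory';

async function migrateToLevelDB() {
  const memory = StorageFactory.createMemory();
  const leveldb = StorageFactory.createLevelDB('./data/deltas');

  // Copies deltas in batches and verifies source/target counts afterwards
  await StorageFactory.migrate(memory, leveldb, { batchSize: 500 });

  await memory.close();
  await leveldb.close();
}
```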

5
src/storage/index.ts Normal file
View File

@ -0,0 +1,5 @@
export * from './interface';
export * from './memory';
export * from './leveldb';
export * from './factory';
export * from './store';

91
src/storage/interface.ts Normal file
View File

@ -0,0 +1,91 @@
import { Delta, DeltaID, DeltaFilter } from '../core/delta';
import { DomainEntityID } from '../core/types';
/**
* Abstract interface for delta storage backends
* Supports both in-memory and persistent storage implementations
*/
export interface DeltaStorage {
/**
* Store a delta
*/
storeDelta(delta: Delta): Promise<void>;
/**
* Get a delta by ID
*/
getDelta(id: DeltaID): Promise<Delta | null>;
/**
* Get all deltas (optionally filtered)
*/
getAllDeltas(filter?: DeltaFilter): Promise<Delta[]>;
/**
* Get deltas that reference a specific entity
*/
getDeltasForEntity(entityId: DomainEntityID): Promise<Delta[]>;
/**
* Get deltas by target context (property)
*/
getDeltasByContext(entityId: DomainEntityID, context: string): Promise<Delta[]>;
/**
* Get statistics about stored deltas
*/
getStats(): Promise<StorageStats>;
/**
* Clean up resources
*/
close(): Promise<void>;
}
export interface StorageStats {
totalDeltas: number;
totalEntities: number;
storageSize?: number; // bytes for persistent storage
oldestDelta?: number; // timestamp
newestDelta?: number; // timestamp
}
/**
* Query interface for more advanced delta queries
*/
export interface DeltaQueryStorage extends DeltaStorage {
/**
* Query deltas with more complex criteria
*/
queryDeltas(query: DeltaQuery): Promise<Delta[]>;
/**
* Count deltas matching criteria without fetching them
*/
countDeltas(query: DeltaQuery): Promise<number>;
/**
* Create an index for faster queries (optional optimization)
*/
createIndex?(fields: string[]): Promise<void>;
}
export interface DeltaQuery {
creator?: string;
host?: string;
timeCreatedAfter?: number;
timeCreatedBefore?: number;
targetEntities?: DomainEntityID[];
contexts?: string[];
limit?: number;
offset?: number;
}
/**
* Configuration for different storage backends
*/
export interface StorageConfig {
type: 'memory' | 'leveldb' | 'sqlite' | 'postgres';
path?: string; // for file-based storage
options?: Record<string, unknown>;
}
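
Usage sketch (not part of this changeset): building a DeltaQuery and running it against any DeltaQueryStorage implementation; the creator ID and contexts are illustrative.

```typescript
// Sketch only: works with either the memory or LevelDB backend.
import { DeltaQueryStorage, DeltaQuery } from './interface';

async function recentDeltasFrom(storage: DeltaQueryStorage, creator: string) {
  const query: DeltaQuery = {
    creator,                               // peer ID of the delta author
    timeCreatedAfter: Date.now() - 60_000, // only the last minute
    contexts: ['name', 'email'],           // hypothetical property contexts
    limit: 100,
  };
  const total = await storage.countDeltas(query);
  const page = await storage.queryDeltas(query);
  return { total, page };
}
```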

336
src/storage/leveldb.ts Normal file
View File

@ -0,0 +1,336 @@
import Debug from 'debug';
import { Level } from 'level';
import { Delta, DeltaID, DeltaFilter } from '../core/delta';
import { DomainEntityID } from '../core/types';
import { DeltaQueryStorage, DeltaQuery, StorageStats } from './interface';
const debug = Debug('rz:storage:leveldb');
/**
* LevelDB-based delta storage implementation
* Provides persistent storage with efficient lookups
*/
export class LevelDBDeltaStorage implements DeltaQueryStorage {
private db: Level<string, string>;
private readonly dbPath: string;
constructor(dbPath: string = './data/deltas') {
this.dbPath = dbPath;
this.db = new Level<string, string>(dbPath);
debug(`Initialized LevelDB storage at ${dbPath}`);
}
async open(): Promise<void> {
// Match ensureOpen(): anything other than 'open' (including 'opening') should await open()
if (this.db.status !== 'open') {
await this.db.open();
}
}
private async ensureOpen(): Promise<void> {
if (this.db.status !== 'open') {
await this.db.open();
}
}
async storeDelta(delta: Delta): Promise<void> {
await this.ensureOpen();
debug(`Storing delta ${delta.id} to LevelDB`);
const batch = this.db.batch();
// Store the main delta record
batch.put(`delta:${delta.id}`, JSON.stringify(delta));
// Create index entries for efficient lookups
// Index by creation time for temporal queries
batch.put(`time:${delta.timeCreated.toString().padStart(16, '0')}:${delta.id}`, delta.id);
// Index by creator
batch.put(`creator:${delta.creator}:${delta.id}`, delta.id);
// Index by host
batch.put(`host:${delta.host}:${delta.id}`, delta.id);
// Index by entity and context for efficient entity queries
for (const pointer of delta.pointers) {
if (typeof pointer.target === 'string' && pointer.targetContext) {
const entityId = pointer.target;
const context = pointer.targetContext;
// Entity index: entity:entityId:deltaId -> deltaId
batch.put(`entity:${entityId}:${delta.id}`, delta.id);
// Context index: context:entityId:context:deltaId -> deltaId
batch.put(`context:${entityId}:${context}:${delta.id}`, delta.id);
}
}
await batch.write();
}
async getDelta(id: DeltaID): Promise<Delta | null> {
await this.ensureOpen();
try {
const deltaJson = await this.db.get(`delta:${id}`);
// Handle case where LevelDB returns string "undefined" for missing keys
if (deltaJson === 'undefined' || deltaJson === undefined) {
return null;
}
return JSON.parse(deltaJson);
} catch (error) {
if ((error as { code?: string }).code === 'LEVEL_NOT_FOUND') {
return null;
}
throw error;
}
}
async getAllDeltas(filter?: DeltaFilter): Promise<Delta[]> {
await this.ensureOpen();
const deltas: Delta[] = [];
// Iterate through all delta records
for await (const [_key, value] of this.db.iterator({
gte: 'delta:',
lt: 'delta:\xFF'
})) {
try {
const delta = JSON.parse(value);
// Apply filter if provided
if (!filter || filter(delta)) {
deltas.push(delta);
}
} catch (error) {
debug(`Error parsing delta from key ${_key}:`, error);
}
}
return deltas;
}
async getDeltasForEntity(entityId: DomainEntityID): Promise<Delta[]> {
await this.ensureOpen();
const deltaIds: string[] = [];
// Use entity index to find all deltas for this entity
for await (const [_key, deltaId] of this.db.iterator({
gte: `entity:${entityId}:`,
lt: `entity:${entityId}:\xFF`
})) {
deltaIds.push(deltaId);
}
// Fetch the actual deltas
const deltas: Delta[] = [];
for (const deltaId of deltaIds) {
const delta = await this.getDelta(deltaId);
if (delta) {
deltas.push(delta);
}
}
return deltas;
}
async getDeltasByContext(entityId: DomainEntityID, context: string): Promise<Delta[]> {
await this.ensureOpen();
const deltaIds: string[] = [];
// Use context index to find deltas for this specific entity+context
for await (const [_key, deltaId] of this.db.iterator({
gte: `context:${entityId}:${context}:`,
lt: `context:${entityId}:${context}:\xFF`
})) {
deltaIds.push(deltaId);
}
// Fetch the actual deltas
const deltas: Delta[] = [];
for (const deltaId of deltaIds) {
const delta = await this.getDelta(deltaId);
if (delta) {
deltas.push(delta);
}
}
return deltas;
}
async queryDeltas(query: DeltaQuery): Promise<Delta[]> {
await this.ensureOpen();
let candidateDeltaIds: Set<string> | null = null;
// Use indexes to narrow down candidates efficiently
if (query.creator) {
const creatorDeltaIds = new Set<string>();
for await (const [_key, deltaId] of this.db.iterator({
gte: `creator:${query.creator}:`,
lt: `creator:${query.creator}:\xFF`
})) {
creatorDeltaIds.add(deltaId);
}
candidateDeltaIds = this.intersectSets(candidateDeltaIds, creatorDeltaIds);
}
if (query.host) {
const hostDeltaIds = new Set<string>();
for await (const [_key, deltaId] of this.db.iterator({
gte: `host:${query.host}:`,
lt: `host:${query.host}:\xFF`
})) {
hostDeltaIds.add(deltaId);
}
candidateDeltaIds = this.intersectSets(candidateDeltaIds, hostDeltaIds);
}
if (query.targetEntities && query.targetEntities.length > 0) {
const entityDeltaIds = new Set<string>();
for (const entityId of query.targetEntities) {
for await (const [_key, deltaId] of this.db.iterator({
gte: `entity:${entityId}:`,
lt: `entity:${entityId}:\xFF`
})) {
entityDeltaIds.add(deltaId);
}
}
candidateDeltaIds = this.intersectSets(candidateDeltaIds, entityDeltaIds);
}
// If no index queries were used, scan all deltas
if (candidateDeltaIds === null) {
candidateDeltaIds = new Set<string>();
for await (const [key, _value] of this.db.iterator({
gte: 'delta:',
lt: 'delta:\xFF'
})) {
const deltaId = key.substring(6); // Remove 'delta:' prefix
candidateDeltaIds.add(deltaId);
}
}
// Fetch and filter the candidate deltas
const results: Delta[] = [];
for (const deltaId of candidateDeltaIds) {
const delta = await this.getDelta(deltaId);
if (!delta) continue;
// Apply additional filters that couldn't be done via indexes
if (query.timeCreatedAfter && delta.timeCreated < query.timeCreatedAfter) continue;
if (query.timeCreatedBefore && delta.timeCreated > query.timeCreatedBefore) continue;
if (query.contexts && query.contexts.length > 0) {
const hasMatchingContext = delta.pointers.some(p =>
p.targetContext && query.contexts!.includes(p.targetContext)
);
if (!hasMatchingContext) continue;
}
results.push(delta);
}
// Sort by creation time
results.sort((a, b) => a.timeCreated - b.timeCreated);
// Apply pagination
let finalResults = results;
if (query.offset) {
finalResults = finalResults.slice(query.offset);
}
if (query.limit) {
finalResults = finalResults.slice(0, query.limit);
}
return finalResults;
}
async countDeltas(query: DeltaQuery): Promise<number> {
// TODO: Could be made more efficient by counting index entries; for now this reuses queryDeltas without pagination and returns the result length
const results = await this.queryDeltas({ ...query, limit: undefined, offset: undefined });
return results.length;
}
async getStats(): Promise<StorageStats> {
await this.ensureOpen();
let totalDeltas = 0;
const entities = new Set<DomainEntityID>();
let oldestDelta: number | undefined;
let newestDelta: number | undefined;
// Count deltas and track entities
for await (const [_key, value] of this.db.iterator({
gte: 'delta:',
lt: 'delta:\xFF'
})) {
totalDeltas++;
try {
const delta: Delta = JSON.parse(value);
// Track entities
for (const pointer of delta.pointers) {
if (typeof pointer.target === 'string' && pointer.targetContext) {
entities.add(pointer.target);
}
}
// Track time range
if (!oldestDelta || delta.timeCreated < oldestDelta) {
oldestDelta = delta.timeCreated;
}
if (!newestDelta || delta.timeCreated > newestDelta) {
newestDelta = delta.timeCreated;
}
} catch (error) {
debug(`Error parsing delta for stats from key ${_key}:`, error);
}
}
return {
totalDeltas,
totalEntities: entities.size,
oldestDelta,
newestDelta
// Note: LevelDB doesn't easily expose storage size, would need filesystem queries
};
}
async close(): Promise<void> {
debug('Closing LevelDB storage');
await this.db.close();
}
// Utility method for set intersection
private intersectSets(setA: Set<string> | null, setB: Set<string>): Set<string> {
if (setA === null) return setB;
const result = new Set<string>();
for (const item of setA) {
if (setB.has(item)) {
result.add(item);
}
}
return result;
}
// LevelDB-specific methods
async clearAll(): Promise<void> {
await this.ensureOpen();
debug('Clearing all data from LevelDB');
await this.db.clear();
}
async compact(): Promise<void> {
await this.ensureOpen();
debug('Compacting LevelDB');
// LevelDB compacts in the background; this full key scan is only a
// best-effort nudge and does not force a compaction
for await (const [_key] of this.db.iterator()) {
// Reading every key exercises the store; it does not guarantee a compaction
}
}
}
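
The iterator ranges above imply a key scheme of `delta:<id>` for the payload plus secondary index keys `creator:<creator>:<id>`, `host:<host>:<id>` and `entity:<entityId>:<id>` whose values are delta IDs; the write side is not shown in this hunk, so treat that as an inference. A minimal sketch of the prefix-scan-plus-intersection pattern queryDeltas relies on, using the `level` package directly:

```typescript
import { Level } from 'level';

// Scan one secondary index by prefix; '\xFF' upper-bounds the range,
// mirroring the gte/lt pairs in queryDeltas above.
async function idsByPrefix(db: Level<string, string>, prefix: string): Promise<Set<string>> {
  const ids = new Set<string>();
  for await (const [_key, deltaId] of db.iterator({ gte: prefix, lt: `${prefix}\xFF` })) {
    ids.add(deltaId);
  }
  return ids;
}

// Candidate sets from multiple indexes are intersected, then the surviving
// delta IDs are fetched from the primary `delta:` keyspace.
async function deltasByCreatorAndHost(db: Level<string, string>, creator: string, host: string) {
  const byCreator = await idsByPrefix(db, `creator:${creator}:`);
  const byHost = await idsByPrefix(db, `host:${host}:`);
  const candidates = [...byCreator].filter(id => byHost.has(id));
  return Promise.all(candidates.map(id => db.get(`delta:${id}`).then(json => JSON.parse(json))));
}
```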

190
src/storage/memory.ts Normal file
View File

@ -0,0 +1,190 @@
import Debug from 'debug';
import { Delta, DeltaID, DeltaFilter } from '../core/delta';
import { DomainEntityID } from '../core/types';
import { DeltaQueryStorage, DeltaQuery, StorageStats } from './interface';
const debug = Debug('rz:storage:memory');
/**
* In-memory delta storage implementation
* Fast but non-persistent, suitable for development and testing
*/
export class MemoryDeltaStorage implements DeltaQueryStorage {
private deltas = new Map<DeltaID, Delta>();
private entityIndex = new Map<DomainEntityID, Set<DeltaID>>();
private contextIndex = new Map<string, Set<DeltaID>>(); // entityId:context -> deltaIds
async storeDelta(delta: Delta): Promise<void> {
debug(`Storing delta ${delta.id}`);
// Store the delta
this.deltas.set(delta.id, delta);
// Update entity index
for (const pointer of delta.pointers) {
if (typeof pointer.target === 'string' && pointer.targetContext) {
const entityId = pointer.target;
// Add to entity index
if (!this.entityIndex.has(entityId)) {
this.entityIndex.set(entityId, new Set());
}
this.entityIndex.get(entityId)!.add(delta.id);
// Add to context index
const contextKey = `${entityId}:${pointer.targetContext}`;
if (!this.contextIndex.has(contextKey)) {
this.contextIndex.set(contextKey, new Set());
}
this.contextIndex.get(contextKey)!.add(delta.id);
}
}
}
async getDelta(id: DeltaID): Promise<Delta | null> {
return this.deltas.get(id) || null;
}
async getAllDeltas(filter?: DeltaFilter): Promise<Delta[]> {
let results = Array.from(this.deltas.values());
if (filter) {
results = results.filter(filter);
}
return results;
}
async getDeltasForEntity(entityId: DomainEntityID): Promise<Delta[]> {
const deltaIds = this.entityIndex.get(entityId);
if (!deltaIds) return [];
const results: Delta[] = [];
for (const deltaId of deltaIds) {
const delta = this.deltas.get(deltaId);
if (delta) {
results.push(delta);
}
}
return results;
}
async getDeltasByContext(entityId: DomainEntityID, context: string): Promise<Delta[]> {
const contextKey = `${entityId}:${context}`;
const deltaIds = this.contextIndex.get(contextKey);
if (!deltaIds) return [];
const results: Delta[] = [];
for (const deltaId of deltaIds) {
const delta = this.deltas.get(deltaId);
if (delta) {
results.push(delta);
}
}
return results;
}
async queryDeltas(query: DeltaQuery): Promise<Delta[]> {
let results = Array.from(this.deltas.values());
// Apply filters
if (query.creator) {
results = results.filter(d => d.creator === query.creator);
}
if (query.host) {
results = results.filter(d => d.host === query.host);
}
if (query.timeCreatedAfter) {
results = results.filter(d => d.timeCreated >= query.timeCreatedAfter!);
}
if (query.timeCreatedBefore) {
results = results.filter(d => d.timeCreated <= query.timeCreatedBefore!);
}
if (query.targetEntities && query.targetEntities.length > 0) {
const targetSet = new Set(query.targetEntities);
results = results.filter(d =>
d.pointers.some(p => typeof p.target === 'string' && targetSet.has(p.target))
);
}
if (query.contexts && query.contexts.length > 0) {
const contextSet = new Set(query.contexts);
results = results.filter(d =>
d.pointers.some(p => p.targetContext && contextSet.has(p.targetContext))
);
}
// Sort by creation time
results.sort((a, b) => a.timeCreated - b.timeCreated);
// Apply pagination
if (query.offset) {
results = results.slice(query.offset);
}
if (query.limit) {
results = results.slice(0, query.limit);
}
return results;
}
async countDeltas(query: DeltaQuery): Promise<number> {
const results = await this.queryDeltas({ ...query, limit: undefined, offset: undefined });
return results.length;
}
async getStats(): Promise<StorageStats> {
const deltas = Array.from(this.deltas.values());
const entities = new Set<DomainEntityID>();
let oldestDelta: number | undefined;
let newestDelta: number | undefined;
for (const delta of deltas) {
// Track entities
for (const pointer of delta.pointers) {
if (typeof pointer.target === 'string' && pointer.targetContext) {
entities.add(pointer.target);
}
}
// Track time range
if (!oldestDelta || delta.timeCreated < oldestDelta) {
oldestDelta = delta.timeCreated;
}
if (!newestDelta || delta.timeCreated > newestDelta) {
newestDelta = delta.timeCreated;
}
}
return {
totalDeltas: this.deltas.size,
totalEntities: entities.size,
oldestDelta,
newestDelta
};
}
async close(): Promise<void> {
debug('Closing memory storage');
this.deltas.clear();
this.entityIndex.clear();
this.contextIndex.clear();
}
// Memory-specific methods for inspection
getInternalState() {
return {
deltasCount: this.deltas.size,
entitiesCount: this.entityIndex.size,
contextsCount: this.contextIndex.size
};
}
}
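
A minimal usage sketch for `MemoryDeltaStorage`. The `Delta` constructor shape and the V1 pointer form `{ localContext, target, targetContext }` are assumed from how `decompose()` in `src/views/lossless.ts` rebuilds deltas; the IDs and property names are placeholders.

```typescript
import { Delta } from '../core/delta';
import { MemoryDeltaStorage } from './memory';

async function demo() {
  const storage = new MemoryDeltaStorage();

  // Constructor fields mirror the decompose() call in lossless.ts
  const delta = new Delta({
    id: 'delta-1',
    creator: 'peer-a',
    host: 'host-1',
    timeCreated: Date.now(),
    pointers: [
      { localContext: 'users', target: 'alice', targetContext: 'name' },
      { localContext: 'name', target: 'Alice' }
    ]
  });

  await storage.storeDelta(delta);

  const forEntity = await storage.getDeltasForEntity('alice');          // entity index
  const forContext = await storage.getDeltasByContext('alice', 'name'); // entity:context index
  const recent = await storage.queryDeltas({ creator: 'peer-a', limit: 10 });
  const stats = await storage.getStats();

  console.log(forEntity.length, forContext.length, recent.length, stats.totalDeltas);
}

demo().catch(console.error);
```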

View File

@ -1,5 +1,5 @@
import { Level } from 'level';
import { LEVEL_DB_DIR } from './config';
import { LEVEL_DB_DIR } from '../config';
import path from 'path';
function newStore(name: string): Level {

3
src/views/index.ts Normal file
View File

@ -0,0 +1,3 @@
export * from './lossless';
export * from './lossy';
export * from './resolvers';

395
src/views/lossless.ts Normal file
View File

@ -0,0 +1,395 @@
// Deltas target entities.
// We maintain a record of every targeted entity and the deltas that target it.
import Debug from 'debug';
import EventEmitter from 'events';
import {Delta, DeltaFilter, DeltaID, DeltaNetworkImageV1} from '../core/delta';
import {RhizomeNode} from '../node';
import {Transactions} from '../features/transactions';
import {DomainEntityID, PropertyID, PropertyTypes, TransactionID, ViewMany} from "../core/types";
import {Negation} from '../features/negation';
import {NegationHelper} from '../features/negation';
const debug = Debug('rz:lossless');
export type CollapsedPointer = {[key: PropertyID]: PropertyTypes};
export type CollapsedDelta = Omit<DeltaNetworkImageV1, 'pointers'> & {
pointers: CollapsedPointer[];
};
export type LosslessViewOne = {
id: DomainEntityID,
referencedAs: string[];
propertyDeltas: {
[key: PropertyID]: CollapsedDelta[]
}
};
export type LosslessViewMany = ViewMany<LosslessViewOne>;
class LosslessEntityMap extends Map<DomainEntityID, LosslessEntity> {};
class LosslessEntity {
properties = new Map<PropertyID, Set<Delta>>();
constructor(readonly lossless: Lossless, readonly id: DomainEntityID) {}
addDelta(delta: Delta) {
const targetContexts = delta.pointers
.filter(({target}) => target === this.id)
.map(({targetContext}) => targetContext)
.filter((targetContext) => typeof targetContext === 'string');
for (const targetContext of targetContexts) {
let propertyDeltas = this.properties.get(targetContext);
if (!propertyDeltas) {
propertyDeltas = new Set<Delta>();
this.properties.set(targetContext, propertyDeltas);
}
propertyDeltas.add(delta);
debug(`[${this.lossless.rhizomeNode.config.peerId}]`, `entity ${this.id} added delta:`, JSON.stringify(delta));
}
}
toJSON() {
const properties: {[key: PropertyID]: number} = {};
for (const [key, deltas] of this.properties.entries()) {
properties[key] = deltas.size;
}
return {
id: this.id,
properties
};
}
}
export class Lossless {
domainEntities = new LosslessEntityMap();
transactions: Transactions;
referencedAs = new Map<string, Set<DomainEntityID>>();
eventStream = new EventEmitter();
// Track all deltas by ID for negation processing
private allDeltas = new Map<DeltaID, Delta>();
// Track which entities are affected by each delta
private deltaToEntities = new Map<DeltaID, Set<DomainEntityID>>();
constructor(readonly rhizomeNode: RhizomeNode) {
this.transactions = new Transactions(this);
this.transactions.eventStream.on("completed", (transactionId, deltaIds) => {
debug(`[${this.rhizomeNode.config.peerId}]`, `Completed transaction ${transactionId}`);
const transaction = this.transactions.get(transactionId);
if (!transaction) return;
for (const id of transaction.entityIds) {
this.eventStream.emit("updated", id, deltaIds);
}
});
}
ingestDelta(delta: Delta): TransactionID | undefined {
// Store delta for negation processing
this.allDeltas.set(delta.id, delta);
let targets: string[] = [];
// Handle negation deltas specially
if (NegationHelper.isNegationDelta(delta)) {
const negatedDeltaId = NegationHelper.getNegatedDeltaId(delta);
if (negatedDeltaId) {
// Find which entities were affected by the negated delta
const affectedEntities = this.deltaToEntities.get(negatedDeltaId);
if (affectedEntities) {
targets = Array.from(affectedEntities);
// Track which entities this negation delta affects
this.deltaToEntities.set(delta.id, affectedEntities);
// Add the negation delta to all affected entities
for (const entityId of affectedEntities) {
let ent = this.domainEntities.get(entityId);
if (!ent) {
ent = new LosslessEntity(this, entityId);
this.domainEntities.set(entityId, ent);
}
// Add negation delta to the entity
// For negation deltas, we need to add them to a special property
// since they don't directly target the entity
let negationDeltas = ent.properties.get('_negations');
if (!negationDeltas) {
negationDeltas = new Set<Delta>();
ent.properties.set('_negations', negationDeltas);
}
negationDeltas.add(delta);
}
}
}
} else {
// Regular delta processing
targets = delta.pointers
.filter(({targetContext}) => !!targetContext)
.map(({target}) => target)
.filter((target) => typeof target === 'string');
// Track which entities this delta affects
this.deltaToEntities.set(delta.id, new Set(targets));
for (const target of targets) {
let ent = this.domainEntities.get(target);
if (!ent) {
ent = new LosslessEntity(this, target);
this.domainEntities.set(target, ent);
}
ent.addDelta(delta);
}
}
for (const {target, localContext} of delta.pointers) {
if (typeof target === "string" && this.domainEntities.has(target)) {
let referencedAs = this.referencedAs.get(localContext);
if (!referencedAs) {
referencedAs = new Set<string>();
this.referencedAs.set(localContext, referencedAs);
}
referencedAs.add(target);
}
}
const transactionId = this.transactions.ingestDelta(delta, targets);
if (!transactionId) {
// No transaction -- we can issue an update event immediately
for (const id of targets) {
this.eventStream.emit("updated", id, [delta.id]);
}
}
return transactionId;
}
viewSpecific(entityId: DomainEntityID, deltaIds: DeltaID[], deltaFilter?: DeltaFilter): LosslessViewOne | undefined {
const combinedFilter = (delta: Delta) => {
if (!deltaIds.includes(delta.id)) {
debug(`[${this.rhizomeNode.config.peerId}]`, `Excluding delta ${delta.id} because it's not in the requested list of deltas`);
return false;
}
if (!deltaFilter) return true;
return deltaFilter(delta);
};
const res = this.compose([entityId], (delta) => combinedFilter(delta));
return res[entityId];
}
decompose(view: LosslessViewOne): Delta[] {
const allDeltas: Delta[] = [];
const seenDeltaIds = new Set<DeltaID>();
// Collect all deltas from all properties
for (const [propertyId, deltas] of Object.entries(view.propertyDeltas)) {
for (const delta of deltas) {
if (!seenDeltaIds.has(delta.id)) {
seenDeltaIds.add(delta.id);
// Convert CollapsedDelta back to Delta
const fullDelta = new Delta({
id: delta.id,
creator: delta.creator,
host: delta.host,
timeCreated: delta.timeCreated,
pointers: delta.pointers.map(pointer => {
// Convert back to V1 pointer format for Delta constructor
const pointerEntries = Object.entries(pointer);
if (pointerEntries.length === 1) {
const [localContext, target] = pointerEntries[0];
if (typeof target === 'string' && this.domainEntities.has(target)) {
// This is a reference pointer to an entity
// The targetContext is the property ID this delta appears under
return { localContext, target, targetContext: propertyId };
} else {
// Scalar pointer
return { localContext, target: target as PropertyTypes };
}
}
// Fallback for unexpected pointer structure
return { localContext: 'unknown', target: 'unknown' };
})
});
allDeltas.push(fullDelta);
}
}
}
return allDeltas;
}
// Backward compatibility alias
view(entityIds?: DomainEntityID[], deltaFilter?: DeltaFilter): LosslessViewMany {
return this.compose(entityIds, deltaFilter);
}
compose(entityIds?: DomainEntityID[], deltaFilter?: DeltaFilter): LosslessViewMany {
const view: LosslessViewMany = {};
entityIds = entityIds ?? Array.from(this.domainEntities.keys());
for (const id of entityIds) {
const ent = this.domainEntities.get(id);
if (!ent) continue;
const referencedAs = new Set<string>();
const propertyDeltas: {
[key: PropertyID]: CollapsedDelta[]
} = {};
let hasVisibleDeltas = false;
// First, collect all deltas for this entity to properly apply negations
const allEntityDeltas: Delta[] = [];
for (const deltas of ent.properties.values()) {
allEntityDeltas.push(...Array.from(deltas));
}
// Apply negation filtering to all deltas for this entity
const nonNegatedDeltas = Negation.filterNegatedDeltas(allEntityDeltas);
const nonNegatedDeltaIds = new Set(nonNegatedDeltas.map(d => d.id));
for (const [key, deltas] of ent.properties.entries()) {
// Filter deltas for this property based on negation status
const filteredDeltas = Array.from(deltas).filter(delta => nonNegatedDeltaIds.has(delta.id));
const visibleDeltas: CollapsedDelta[] = [];
for (const delta of filteredDeltas) {
if (deltaFilter && !deltaFilter(delta)) {
continue;
}
// If this delta is part of a transaction,
// we need to be able to wait for the whole transaction.
if (delta.transactionId) {
if (!this.transactions.isComplete(delta.transactionId)) {
debug(`[${this.rhizomeNode.config.peerId}]`, `Excluding delta ${delta.id} because transaction ${delta.transactionId} is not completed`);
continue;
}
}
const pointers: CollapsedPointer[] = [];
for (const {localContext, target} of delta.pointers) {
pointers.push({[localContext]: target});
if (target === ent.id) {
referencedAs.add(localContext);
}
}
visibleDeltas.push({
...delta,
pointers
});
hasVisibleDeltas = true;
}
if (visibleDeltas.length > 0) {
propertyDeltas[key] = visibleDeltas;
}
}
// Only include entity in view if it has visible deltas
if (hasVisibleDeltas) {
view[ent.id] = {
id: ent.id,
referencedAs: Array.from(referencedAs.values()),
propertyDeltas
};
}
}
return view;
}
// Get negation statistics for an entity
getNegationStats(entityId: DomainEntityID): {
totalDeltas: number;
negationDeltas: number;
negatedDeltas: number;
effectiveDeltas: number;
negationsByProperty: { [key: PropertyID]: { negated: number; total: number } };
} {
const ent = this.domainEntities.get(entityId);
if (!ent) {
return {
totalDeltas: 0,
negationDeltas: 0,
negatedDeltas: 0,
effectiveDeltas: 0,
negationsByProperty: {}
};
}
// Get all deltas for this entity, including negation deltas
const allEntityDeltas: Delta[] = [];
for (const deltas of ent.properties.values()) {
allEntityDeltas.push(...Array.from(deltas));
}
let totalDeltas = 0;
let totalNegationDeltas = 0;
let totalNegatedDeltas = 0;
let totalEffectiveDeltas = 0;
const negationsByProperty: { [key: PropertyID]: { negated: number; total: number } } = {};
// Get all negation deltas for this entity
const negationDeltas = this.getNegationDeltas(entityId);
const negatedDeltaIds = new Set<DeltaID>();
for (const negDelta of negationDeltas) {
const negatedId = NegationHelper.getNegatedDeltaId(negDelta);
if (negatedId) {
negatedDeltaIds.add(negatedId);
}
}
for (const [property, deltas] of ent.properties.entries()) {
// Skip the special _negations property in the per-property stats
if (property === '_negations') {
totalDeltas += deltas.size;
totalNegationDeltas += deltas.size;
continue;
}
const deltaArray = Array.from(deltas);
const propertyNegatedCount = deltaArray.filter(d => negatedDeltaIds.has(d.id)).length;
const propertyTotal = deltaArray.length;
totalDeltas += propertyTotal;
totalNegatedDeltas += propertyNegatedCount;
totalEffectiveDeltas += (propertyTotal - propertyNegatedCount);
negationsByProperty[property] = {
negated: propertyNegatedCount,
total: propertyTotal
};
}
return {
totalDeltas,
negationDeltas: totalNegationDeltas,
negatedDeltas: totalNegatedDeltas,
effectiveDeltas: totalEffectiveDeltas,
negationsByProperty
};
}
// Get all negation deltas for an entity
getNegationDeltas(entityId: DomainEntityID): Delta[] {
const ent = this.domainEntities.get(entityId);
if (!ent) return [];
const negationProperty = ent.properties.get('_negations');
if (!negationProperty) return [];
return Array.from(negationProperty);
}
// TODO: point-in-time queries
}
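
A small sketch of the ingest → compose flow this class implements. The `RhizomeNode` instance is declared rather than constructed, since node wiring lives in `src/node.ts` and is not part of this hunk; the delta shape follows `decompose()` above.

```typescript
import { Delta } from '../core/delta';
import { RhizomeNode } from '../node';
import { Lossless } from './lossless';

declare const node: RhizomeNode; // construction elided; see src/node.ts

const lossless = new Lossless(node);

lossless.ingestDelta(new Delta({
  id: 'delta-1',
  creator: 'peer-a',
  host: 'host-1',
  timeCreated: Date.now(),
  pointers: [
    { localContext: 'users', target: 'alice', targetContext: 'name' },
    { localContext: 'name', target: 'Alice' }
  ]
}));

// Compose a lossless view; each entry has { id, referencedAs, propertyDeltas }
const view = lossless.compose(['alice']);
console.log(view['alice']?.propertyDeltas['name']?.length); // -> 1

// Negation bookkeeping per entity
console.log(lossless.getNegationStats('alice').effectiveDeltas);
```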

View File

@ -3,9 +3,9 @@
// into various possible "lossy" views that combine or exclude some information.
import Debug from 'debug';
import {DeltaFilter, DeltaID} from "./delta";
import {DeltaFilter, DeltaID} from "../core/delta";
import {Lossless, LosslessViewOne} from "./lossless";
import {DomainEntityID} from "./types";
import {DomainEntityID} from "../core/types";
const debug = Debug('rz:lossy');
// We support incremental updates of lossy models.

View File

@ -0,0 +1,189 @@
import { EntityProperties } from "../../core/entity";
import { Lossless, LosslessViewOne } from "../lossless";
import { Lossy } from '../lossy';
import { DomainEntityID, PropertyID, ViewMany } from "../../core/types";
import { valueFromCollapsedDelta } from "./last-write-wins";
export type AggregationType = 'min' | 'max' | 'sum' | 'average' | 'count';
export type AggregationConfig = {
[propertyId: PropertyID]: AggregationType;
};
type AggregatedProperty = {
values: number[];
type: AggregationType;
result?: number;
};
type AggregatedProperties = {
[key: PropertyID]: AggregatedProperty;
};
export type AggregatedViewOne = {
id: DomainEntityID;
properties: AggregatedProperties;
};
export type AggregatedViewMany = ViewMany<AggregatedViewOne>;
type ResolvedAggregatedViewOne = {
id: DomainEntityID;
properties: EntityProperties;
};
type ResolvedAggregatedViewMany = ViewMany<ResolvedAggregatedViewOne>;
type Accumulator = AggregatedViewMany;
type Result = ResolvedAggregatedViewMany;
function aggregateValues(values: number[], type: AggregationType): number {
if (values.length === 0) return 0;
switch (type) {
case 'min':
return Math.min(...values);
case 'max':
return Math.max(...values);
case 'sum':
return values.reduce((sum, val) => sum + val, 0);
case 'average':
return values.reduce((sum, val) => sum + val, 0) / values.length;
case 'count':
return values.length;
default:
throw new Error(`Unknown aggregation type: ${type}`);
}
}
export class AggregationResolver extends Lossy<Accumulator, Result> {
constructor(
lossless: Lossless,
private config: AggregationConfig
) {
super(lossless);
}
initializer(): Accumulator {
return {};
}
reducer(acc: Accumulator, cur: LosslessViewOne): Accumulator {
if (!acc[cur.id]) {
acc[cur.id] = { id: cur.id, properties: {} };
}
for (const [propertyId, deltas] of Object.entries(cur.propertyDeltas)) {
const aggregationType = this.config[propertyId];
if (!aggregationType) continue;
if (!acc[cur.id].properties[propertyId]) {
acc[cur.id].properties[propertyId] = {
values: [],
type: aggregationType
};
}
// Extract numeric values from all deltas for this property
const newValues: number[] = [];
for (const delta of deltas || []) {
const value = valueFromCollapsedDelta(propertyId, delta);
if (typeof value === 'number') {
newValues.push(value);
}
}
// Update the values array (avoiding duplicates by clearing and rebuilding)
acc[cur.id].properties[propertyId].values = newValues;
}
return acc;
}
resolver(cur: Accumulator): Result {
const res: Result = {};
for (const [id, entity] of Object.entries(cur)) {
const entityResult: ResolvedAggregatedViewOne = { id, properties: {} };
for (const [propertyId, aggregatedProp] of Object.entries(entity.properties)) {
const result = aggregateValues(aggregatedProp.values, aggregatedProp.type);
entityResult.properties[propertyId] = result;
}
// Only include entities that have at least one aggregated property
if (Object.keys(entityResult.properties).length > 0) {
res[id] = entityResult;
}
}
return res;
}
// Override resolve to build accumulator on-demand if needed
resolve(entityIds?: DomainEntityID[]): Result | undefined {
if (!entityIds) {
entityIds = Array.from(this.lossless.domainEntities.keys());
}
// If we don't have an accumulator, build it from the lossless view
if (!this.accumulator) {
this.accumulator = this.initializer();
// Use the general view method instead of viewSpecific
const fullView = this.lossless.view(entityIds, this.deltaFilter);
for (const entityId of entityIds) {
const losslessViewOne = fullView[entityId];
if (losslessViewOne) {
this.accumulator = this.reducer(this.accumulator, losslessViewOne);
}
}
}
if (!this.accumulator) return undefined;
return this.resolver(this.accumulator);
}
}
// Convenience classes for common aggregation types
export class MinResolver extends AggregationResolver {
constructor(lossless: Lossless, properties: PropertyID[]) {
const config: AggregationConfig = {};
properties.forEach(prop => config[prop] = 'min');
super(lossless, config);
}
}
export class MaxResolver extends AggregationResolver {
constructor(lossless: Lossless, properties: PropertyID[]) {
const config: AggregationConfig = {};
properties.forEach(prop => config[prop] = 'max');
super(lossless, config);
}
}
export class SumResolver extends AggregationResolver {
constructor(lossless: Lossless, properties: PropertyID[]) {
const config: AggregationConfig = {};
properties.forEach(prop => config[prop] = 'sum');
super(lossless, config);
}
}
export class AverageResolver extends AggregationResolver {
constructor(lossless: Lossless, properties: PropertyID[]) {
const config: AggregationConfig = {};
properties.forEach(prop => config[prop] = 'average');
super(lossless, config);
}
}
export class CountResolver extends AggregationResolver {
constructor(lossless: Lossless, properties: PropertyID[]) {
const config: AggregationConfig = {};
properties.forEach(prop => config[prop] = 'count');
super(lossless, config);
}
}
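
Usage sketch for the aggregation resolvers above, assuming a populated `Lossless` instance (declared here rather than built):

```typescript
import { Lossless } from '../lossless';
import { AggregationResolver, AverageResolver, SumResolver } from './aggregation-resolvers';

declare const lossless: Lossless; // populated elsewhere (see tests)

// Per-property aggregation via the convenience classes
const totals = new SumResolver(lossless, ['score']).resolve();
// e.g. { player1: { id: 'player1', properties: { score: 42 } }, ... }

const averages = new AverageResolver(lossless, ['score']).resolve();

// Or mix aggregation types per property with an explicit config
const mixed = new AggregationResolver(lossless, { score: 'max', visits: 'count' }).resolve();

console.log(totals, averages, mixed);
```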

View File

@ -0,0 +1,296 @@
import { EntityProperties } from "../../core/entity";
import { CollapsedDelta, Lossless, LosslessViewOne } from "../lossless";
import { Lossy } from '../lossy';
import { DomainEntityID, PropertyID, PropertyTypes, ViewMany } from "../../core/types";
// Plugin interface for custom resolvers
export interface ResolverPlugin<T = unknown> {
name: string;
// Initialize the state for a property
initialize(): T;
// Process a new value for the property
update(currentState: T, newValue: PropertyTypes, delta: CollapsedDelta): T;
// Resolve the final value from the accumulated state
resolve(state: T): PropertyTypes;
}
// Configuration for custom resolver
export type CustomResolverConfig = {
[propertyId: PropertyID]: ResolverPlugin;
};
type PropertyState = {
plugin: ResolverPlugin;
state: unknown;
};
type EntityState = {
[propertyId: PropertyID]: PropertyState;
};
type CustomResolverAccumulator = {
[entityId: DomainEntityID]: {
id: DomainEntityID;
properties: EntityState;
};
};
type CustomResolverResult = ViewMany<{
id: DomainEntityID;
properties: EntityProperties;
}>;
// Extract value from delta for a specific property
function extractValueFromDelta(propertyId: PropertyID, delta: CollapsedDelta): PropertyTypes | undefined {
for (const pointer of delta.pointers) {
for (const [key, value] of Object.entries(pointer)) {
if (key === propertyId && (typeof value === "string" || typeof value === "number")) {
return value;
}
}
}
return undefined;
}
export class CustomResolver extends Lossy<CustomResolverAccumulator, CustomResolverResult> {
constructor(
lossless: Lossless,
private config: CustomResolverConfig
) {
super(lossless);
}
initializer(): CustomResolverAccumulator {
return {};
}
reducer(acc: CustomResolverAccumulator, cur: LosslessViewOne): CustomResolverAccumulator {
if (!acc[cur.id]) {
acc[cur.id] = { id: cur.id, properties: {} };
}
for (const [propertyId, deltas] of Object.entries(cur.propertyDeltas)) {
const plugin = this.config[propertyId];
if (!plugin) continue;
// Initialize property state if not exists
if (!acc[cur.id].properties[propertyId]) {
acc[cur.id].properties[propertyId] = {
plugin,
state: plugin.initialize()
};
}
const propertyState = acc[cur.id].properties[propertyId];
// Process all deltas for this property
for (const delta of deltas || []) {
const value = extractValueFromDelta(propertyId, delta);
if (value !== undefined) {
propertyState.state = propertyState.plugin.update(propertyState.state, value, delta);
}
}
}
return acc;
}
resolver(cur: CustomResolverAccumulator): CustomResolverResult {
const res: CustomResolverResult = {};
for (const [entityId, entity] of Object.entries(cur)) {
const entityResult: { id: string; properties: EntityProperties } = { id: entityId, properties: {} };
for (const [propertyId, propertyState] of Object.entries(entity.properties)) {
const resolvedValue = propertyState.plugin.resolve(propertyState.state);
entityResult.properties[propertyId] = resolvedValue;
}
// Only include entities that have at least one resolved property
if (Object.keys(entityResult.properties).length > 0) {
res[entityId] = entityResult;
}
}
return res;
}
// Override resolve to build accumulator on-demand if needed
resolve(entityIds?: DomainEntityID[]): CustomResolverResult | undefined {
if (!entityIds) {
entityIds = Array.from(this.lossless.domainEntities.keys());
}
// If we don't have an accumulator, build it from the lossless view
if (!this.accumulator) {
this.accumulator = this.initializer();
const fullView = this.lossless.view(entityIds, this.deltaFilter);
for (const entityId of entityIds) {
const losslessViewOne = fullView[entityId];
if (losslessViewOne) {
this.accumulator = this.reducer(this.accumulator, losslessViewOne);
}
}
}
if (!this.accumulator) return undefined;
return this.resolver(this.accumulator);
}
}
// Built-in plugin implementations
// Last Write Wins plugin
export class LastWriteWinsPlugin implements ResolverPlugin<{ value?: PropertyTypes, timestamp: number }> {
name = 'last-write-wins';
initialize() {
return { timestamp: 0 };
}
update(currentState: { value?: PropertyTypes, timestamp: number }, newValue: PropertyTypes, delta: CollapsedDelta) {
if (delta.timeCreated > currentState.timestamp) {
return {
value: newValue,
timestamp: delta.timeCreated
};
}
return currentState;
}
resolve(state: { value?: PropertyTypes, timestamp: number }): PropertyTypes {
return state.value ?? '';
}
}
// First Write Wins plugin
export class FirstWriteWinsPlugin implements ResolverPlugin<{ value?: PropertyTypes, timestamp: number }> {
name = 'first-write-wins';
initialize() {
return { timestamp: Infinity };
}
update(currentState: { value?: PropertyTypes, timestamp: number }, newValue: PropertyTypes, delta: CollapsedDelta) {
if (delta.timeCreated < currentState.timestamp) {
return {
value: newValue,
timestamp: delta.timeCreated
};
}
return currentState;
}
resolve(state: { value?: PropertyTypes, timestamp: number }): PropertyTypes {
return state.value ?? '';
}
}
// Concatenation plugin (for string values)
export class ConcatenationPlugin implements ResolverPlugin<{ values: { value: string, timestamp: number }[] }> {
name = 'concatenation';
constructor(private separator: string = ' ') { }
initialize() {
return { values: [] };
}
update(currentState: { values: { value: string, timestamp: number }[] }, newValue: PropertyTypes, delta: CollapsedDelta) {
if (typeof newValue === 'string') {
// Check if this value already exists (avoid duplicates)
const exists = currentState.values.some(v => v.value === newValue);
if (!exists) {
currentState.values.push({
value: newValue,
timestamp: delta.timeCreated
});
// Sort by timestamp to maintain chronological order
currentState.values.sort((a, b) => a.timestamp - b.timestamp);
}
}
return currentState;
}
resolve(state: { values: { value: string, timestamp: number }[] }): PropertyTypes {
return state.values.map(v => v.value).join(this.separator);
}
}
// Majority vote plugin
export class MajorityVotePlugin implements ResolverPlugin<{ votes: Map<PropertyTypes, number> }> {
name = 'majority-vote';
initialize() {
return { votes: new Map() };
}
update(currentState: { votes: Map<PropertyTypes, number> }, newValue: PropertyTypes, _delta: CollapsedDelta) {
const currentCount = currentState.votes.get(newValue) || 0;
currentState.votes.set(newValue, currentCount + 1);
return currentState;
}
resolve(state: { votes: Map<PropertyTypes, number> }): PropertyTypes {
let maxVotes = 0;
let winner: PropertyTypes = '';
for (const [value, votes] of state.votes.entries()) {
if (votes > maxVotes) {
maxVotes = votes;
winner = value;
}
}
return winner;
}
}
// Numeric min/max plugins
export class MinPlugin implements ResolverPlugin<{ min?: number }> {
name = 'min';
initialize() {
return {};
}
update(currentState: { min?: number }, newValue: PropertyTypes, _delta: CollapsedDelta) {
if (typeof newValue === 'number') {
if (currentState.min === undefined || newValue < currentState.min) {
return { min: newValue };
}
}
return currentState;
}
resolve(state: { min?: number }): PropertyTypes {
return state.min || 0;
Review

I wonder if this is safe -- maybe if no values are found the result should be omitted
}
}
export class MaxPlugin implements ResolverPlugin<{ max?: number }> {
name = 'max';
initialize() {
return {};
}
update(currentState: { max?: number }, newValue: PropertyTypes, _delta: CollapsedDelta) {
if (typeof newValue === 'number') {
if (currentState.max === undefined || newValue > currentState.max) {
return { max: newValue };
}
}
return currentState;
}
resolve(state: { max?: number }): PropertyTypes {
return state.max || 0;
}
}
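
Sketch of wiring a bespoke plugin into `CustomResolver`; the `LongestStringPlugin` below is illustrative and not part of this diff. On the min/max review note above: since `ResolverPlugin.resolve()` must return a `PropertyTypes`, omitting a property with no observed values would require widening the interface to allow `undefined` (and having `CustomResolver.resolver()` skip such properties), which may be the cleaner fix.

```typescript
import { PropertyTypes } from '../../core/types';
import { CollapsedDelta, Lossless } from '../lossless';
import { CustomResolver, LastWriteWinsPlugin, ResolverPlugin } from './custom-resolvers';

// Illustrative plugin: keep the longest string observed for a property
class LongestStringPlugin implements ResolverPlugin<{ longest?: string }> {
  name = 'longest-string';

  initialize() {
    return {};
  }

  update(state: { longest?: string }, newValue: PropertyTypes, _delta: CollapsedDelta) {
    if (typeof newValue === 'string' &&
        (state.longest === undefined || newValue.length > state.longest.length)) {
      return { longest: newValue };
    }
    return state;
  }

  resolve(state: { longest?: string }): PropertyTypes {
    return state.longest ?? '';
  }
}

declare const lossless: Lossless; // populated elsewhere (see tests)

const resolver = new CustomResolver(lossless, {
  name: new LastWriteWinsPlugin(),
  bio: new LongestStringPlugin()
});

const resolved = resolver.resolve();
// e.g. { user1: { id: 'user1', properties: { name: 'Alice', bio: '...' } } }
console.log(resolved);
```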

View File

@ -0,0 +1,4 @@
export * from './aggregation-resolvers';
export * from './custom-resolvers';
export * from './last-write-wins';
export * from './timestamp-resolvers';

View File

@ -1,8 +1,8 @@
// import Debug from 'debug';
import {EntityProperties} from "./entity";
import {CollapsedDelta, LosslessViewOne} from "./lossless";
import {Lossy} from './lossy';
import {DomainEntityID, PropertyID, PropertyTypes, Timestamp, ViewMany} from "./types";
import {EntityProperties} from "../../core/entity";
import {CollapsedDelta, LosslessViewOne} from "../lossless";
import {Lossy} from '../lossy';
import {DomainEntityID, PropertyID, PropertyTypes, Timestamp, ViewMany} from "../../core/types";
// const debug = Debug('rz:lossy:last-write-wins');
type TimestampedProperty = {
@ -105,5 +105,31 @@ export class LastWriteWins extends Lossy<Accumulator, Result> {
return res;
};
// Override resolve to build accumulator on-demand if needed
Review

Maybe this belongs in the `Lossy` class itself?
resolve(entityIds?: DomainEntityID[]): Result | undefined {
if (!entityIds) {
entityIds = Array.from(this.lossless.domainEntities.keys());
}
// If we don't have an accumulator, build it from the lossless view
if (!this.accumulator) {
this.accumulator = this.initializer();
// Use the general view method
const fullView = this.lossless.view(entityIds, this.deltaFilter);
for (const entityId of entityIds) {
const losslessViewOne = fullView[entityId];
if (losslessViewOne) {
this.accumulator = this.reducer(this.accumulator, losslessViewOne);
}
}
}
if (!this.accumulator) return undefined;
return this.resolver(this.accumulator);
}
}
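
The same `resolve()` override now appears in `LastWriteWins`, `AggregationResolver`, `CustomResolver` and `TimestampResolver`. Echoing the review note above, a sketch of what hoisting it into the `Lossy` base class could look like; member names are assumed from how the subclasses use them, and this is not part of the diff.

```typescript
import { DeltaFilter } from '../core/delta';
import { DomainEntityID } from '../core/types';
import { Lossless, LosslessViewOne } from './lossless';

export abstract class Lossy<Accumulator, Result> {
  protected accumulator?: Accumulator;
  protected deltaFilter?: DeltaFilter;

  constructor(readonly lossless: Lossless) {}

  abstract initializer(): Accumulator;
  abstract reducer(acc: Accumulator, cur: LosslessViewOne): Accumulator;
  abstract resolver(acc: Accumulator): Result;

  // Shared on-demand accumulator build, replacing the per-subclass copies
  resolve(entityIds?: DomainEntityID[]): Result | undefined {
    entityIds = entityIds ?? Array.from(this.lossless.domainEntities.keys());
    if (!this.accumulator) {
      this.accumulator = this.initializer();
      const fullView = this.lossless.view(entityIds, this.deltaFilter);
      for (const entityId of entityIds) {
        const one = fullView[entityId];
        if (one) {
          this.accumulator = this.reducer(this.accumulator, one);
        }
      }
    }
    if (!this.accumulator) return undefined;
    return this.resolver(this.accumulator);
  }
}
```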

View File

@ -0,0 +1,177 @@
import { EntityProperties } from "../../core/entity";
import { Lossless, LosslessViewOne } from "../lossless";
import { Lossy } from '../lossy';
import { DomainEntityID, PropertyID, PropertyTypes, Timestamp, ViewMany } from "../../core/types";
import { valueFromCollapsedDelta } from "./last-write-wins";
export type TieBreakingStrategy = 'creator-id' | 'delta-id' | 'host-id' | 'lexicographic';
type TimestampedPropertyWithTieBreaking = {
value: PropertyTypes,
timeUpdated: Timestamp,
creator: string,
deltaId: string,
host: string
};
type TimestampedPropertiesWithTieBreaking = {
[key: PropertyID]: TimestampedPropertyWithTieBreaking
};
export type TimestampedViewOne = {
id: DomainEntityID;
properties: TimestampedPropertiesWithTieBreaking;
};
export type TimestampedViewMany = ViewMany<TimestampedViewOne>;
export type ResolvedTimestampedViewOne = {
id: DomainEntityID;
properties: EntityProperties;
};
export type ResolvedTimestampedViewMany = ViewMany<ResolvedTimestampedViewOne>;
type Accumulator = TimestampedViewMany;
type Result = ResolvedTimestampedViewMany;
function compareWithTieBreaking(
a: TimestampedPropertyWithTieBreaking,
b: TimestampedPropertyWithTieBreaking,
strategy: TieBreakingStrategy
): number {
// First compare by timestamp (most recent wins)
if (a.timeUpdated !== b.timeUpdated) {
return a.timeUpdated - b.timeUpdated;
}
// If timestamps are equal, use tie-breaking strategy
switch (strategy) {
case 'creator-id':
return a.creator.localeCompare(b.creator);
case 'delta-id':
return a.deltaId.localeCompare(b.deltaId);
case 'host-id':
return a.host.localeCompare(b.host);
case 'lexicographic':
// Compare by value if it's a string, otherwise by delta ID
if (typeof a.value === 'string' && typeof b.value === 'string') {
return a.value.localeCompare(b.value);
}
return a.deltaId.localeCompare(b.deltaId);
default:
throw new Error(`Unknown tie-breaking strategy: ${strategy}`);
}
}
export class TimestampResolver extends Lossy<Accumulator, Result> {
constructor(
lossless: Lossless,
private tieBreakingStrategy: TieBreakingStrategy = 'delta-id'
) {
super(lossless);
}
initializer(): Accumulator {
return {};
}
reducer(acc: Accumulator, cur: LosslessViewOne): Accumulator {
if (!acc[cur.id]) {
acc[cur.id] = { id: cur.id, properties: {} };
}
for (const [key, deltas] of Object.entries(cur.propertyDeltas)) {
let bestProperty: TimestampedPropertyWithTieBreaking | undefined;
for (const delta of deltas || []) {
const value = valueFromCollapsedDelta(key, delta);
if (value === undefined) continue;
const property: TimestampedPropertyWithTieBreaking = {
value,
timeUpdated: delta.timeCreated,
creator: delta.creator,
deltaId: delta.id,
host: delta.host
};
if (!bestProperty || compareWithTieBreaking(property, bestProperty, this.tieBreakingStrategy) > 0) {
bestProperty = property;
}
}
if (bestProperty) {
const existing = acc[cur.id].properties[key];
if (!existing || compareWithTieBreaking(bestProperty, existing, this.tieBreakingStrategy) > 0) {
acc[cur.id].properties[key] = bestProperty;
}
}
}
return acc;
}
resolver(cur: Accumulator): Result {
const res: Result = {};
for (const [id, ent] of Object.entries(cur)) {
res[id] = { id, properties: {} };
for (const [key, timestampedProp] of Object.entries(ent.properties)) {
res[id].properties[key] = timestampedProp.value;
}
}
return res;
}
// Override resolve to build accumulator on-demand if needed
resolve(entityIds?: DomainEntityID[]): Result | undefined {
if (!entityIds) {
entityIds = Array.from(this.lossless.domainEntities.keys());
}
// If we don't have an accumulator, build it from the lossless view
if (!this.accumulator) {
this.accumulator = this.initializer();
// Use the general view method instead of viewSpecific
const fullView = this.lossless.view(entityIds, this.deltaFilter);
for (const entityId of entityIds) {
const losslessViewOne = fullView[entityId];
if (losslessViewOne) {
this.accumulator = this.reducer(this.accumulator, losslessViewOne);
}
}
}
if (!this.accumulator) return undefined;
return this.resolver(this.accumulator);
}
}
// Convenience classes for different tie-breaking strategies
export class CreatorIdTimestampResolver extends TimestampResolver {
constructor(lossless: Lossless) {
super(lossless, 'creator-id');
}
}
export class DeltaIdTimestampResolver extends TimestampResolver {
constructor(lossless: Lossless) {
super(lossless, 'delta-id');
}
}
export class HostIdTimestampResolver extends TimestampResolver {
constructor(lossless: Lossless) {
super(lossless, 'host-id');
}
}
export class LexicographicTimestampResolver extends TimestampResolver {
constructor(lossless: Lossless) {
super(lossless, 'lexicographic');
}
}
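
Usage sketch for the timestamp resolvers, again with the `Lossless` instance declared rather than constructed:

```typescript
import { Lossless } from '../lossless';
import { CreatorIdTimestampResolver, TimestampResolver } from './timestamp-resolvers';

declare const lossless: Lossless; // populated elsewhere (see tests)

// Default tie-breaking: when two deltas share a timestamp,
// the lexicographically greater delta ID wins
const byDeltaId = new TimestampResolver(lossless).resolve();

// Deterministic alternative: break ties by creator ID instead
const byCreator = new CreatorIdTimestampResolver(lossless).resolve();

console.log(byDeltaId, byCreator);
```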

View File

View File

@ -0,0 +1 @@
MANIFEST-000020

View File

View File

@ -0,0 +1,3 @@
2025/06/09-22:08:03.381417 7d18dafbe640 Recovering log #19
2025/06/09-22:08:03.388931 7d18dafbe640 Delete type=3 #18
2025/06/09-22:08:03.389041 7d18dafbe640 Delete type=0 #19

View File

@ -0,0 +1,3 @@
2025/06/09-22:06:22.826797 7d82d53fe640 Recovering log #17
2025/06/09-22:06:22.833921 7d82d53fe640 Delete type=0 #17
2025/06/09-22:06:22.833954 7d82d53fe640 Delete type=3 #16

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1 @@
MANIFEST-000241

View File

View File

@ -0,0 +1,5 @@
2025/06/09-22:08:03.351430 7d18da7bd640 Recovering log #240
2025/06/09-22:08:03.351481 7d18da7bd640 Level-0 table #242: started
2025/06/09-22:08:03.353466 7d18da7bd640 Level-0 table #242: 877 bytes OK
2025/06/09-22:08:03.359635 7d18da7bd640 Delete type=0 #240
2025/06/09-22:08:03.359683 7d18da7bd640 Delete type=3 #238

View File

@ -0,0 +1,5 @@
2025/06/09-22:08:03.334848 7d18da7bd640 Recovering log #237
2025/06/09-22:08:03.334894 7d18da7bd640 Level-0 table #239: started
2025/06/09-22:08:03.337138 7d18da7bd640 Level-0 table #239: 855 bytes OK
2025/06/09-22:08:03.344340 7d18da7bd640 Delete type=0 #237
2025/06/09-22:08:03.344389 7d18da7bd640 Delete type=3 #235

Binary file not shown.

Some files were not shown because too many files have changed in this diff