feat: version 2 beta

2025-04-25 22:39:47 +00:00
commit 1e58359905
75 changed files with 6899 additions and 0 deletions

.github/workflows/publish.yml vendored Normal file

@@ -0,0 +1,46 @@
name: Publish
on:
workflow_dispatch:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Deno
uses: maximousblk/setup-deno@v2
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 22
- run: deno install
- run: deno task lint
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Deno
uses: maximousblk/setup-deno@v2
- run: deno install
- run: deno task test
publish:
runs-on: ubuntu-latest
needs: [lint, test]
permissions:
contents: read
id-token: write
steps:
- uses: actions/checkout@v4
- name: Publish package
run: npx jsr publish

.github/workflows/test.yml vendored Normal file

@@ -0,0 +1,38 @@
name: Test
on:
pull_request:
branches:
- main
push:
branches:
- main
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Deno
uses: maximousblk/setup-deno@v2
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- run: deno install
- run: deno task lint
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Deno
uses: maximousblk/setup-deno@v2
- run: deno install
- run: deno task test
- run: deno task test:publish

.gitignore vendored Normal file

@@ -0,0 +1 @@
node_modules

.npmrc Normal file

@@ -0,0 +1 @@
@jsr:registry=https://npm.jsr.io

.vscode/settings.json vendored Normal file

@@ -0,0 +1,10 @@
{
"deno.enable": true,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
}
}

LICENSE Normal file

@@ -0,0 +1,13 @@
MIT License
Copyright 2016-2025 Christoffer Rødvik.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the
Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md Normal file

@@ -0,0 +1,229 @@
<p align="center">
<img src="https://user-images.githubusercontent.com/1998130/229430454-ca0f2811-d874-4314-b13d-c558de8eec7e.svg" />
</p>
# Event Store
Event store solution written in Deno for use in TypeScript projects to manage and distribute events from a central
repository to one or more distributed services.
## Quick Start
The following provides a quick introduction on how to get started.
### Configs
Events are defined in `json` configuration files which are printed to a generated `events.ts` file consumed by the
event store instance. To get started, create a new folder that will house the event configurations.
```sh
$ mkdir events
$ cd events
```
Now add a new event configuration file.
```sh
$ touch user-created.json
```
Open the file and add the event details.
```json
{
"event": {
"type": "user:created",
"data": {
"name": {
"type": "object",
"properties": {
"given": {
"type": "string"
},
"family": {
"type": "string"
}
}
},
"email": {
"type": "string"
}
},
"meta": {
"auditor": {
"type": "string"
}
}
}
}
```
### Generate
To create our `events.ts` file we have to run our configurations through our event printer.
```ts
import { printEvents } from "@valkyr/event-store";
await printEvents({
inputs: ["./events"],
outputs: ["./generated/events.ts"],
});
```
### Event Store
Once we have defined our configs and printed our events we can create a new event store instance. Adapters are
currently available for the browser, MongoDB, and Postgres, and they all work the same way; a minimal setup sketch
follows the list below.
- Browser _(TODO)_
- Mongo _(TODO)_
- [Postgres](./adapters/postgres)
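As a minimal setup sketch, the following mirrors the Postgres adapter README further down; the connection url and
generated module path are placeholders.

```ts
import { makePostgresEventStore } from "@valkyr/event-store/postgres";
import postgres from "postgres";

import { type Event, events, validators } from "./generated/events.ts";

// Lazily connect to postgres and bind the generated events and validators
// to a new event store instance. Url and schema values are placeholders.
export const eventStore = makePostgresEventStore<Event>({
  connection: () => postgres("postgres://user:pass@localhost:5432/app"),
  schema: "event_store",
  events,
  validators,
});
```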
### Reducers
Event reducers take an entity stream and reduce it to a desired state. This is required when we want to perform write
side business logic on the current state of our streams. Using read stores for this is not ideal, as the read side
data may not be up to date.
```ts
import { makeReducer } from "@valkyr/event-store";
import type { EventRecord } from "./generated/events.ts";
const reducer = makeReducer<{
name: string;
email: string;
}, EventRecord>((state, event) => {
switch (event.type) {
case "user:created": {
state.name = `${event.data.name.given} ${event.data.name.family}`;
state.email = event.data.email;
break;
}
case "user:email-set": {
state.email = event.data.email;
break;
}
}
return state;
}, "user", () => ({
name: "",
email: "",
}));
```
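With an event store instance in hand, the reducer can be passed to `.reduce` to fold a stream into its latest state,
as sketched below (`eventStore` and `userId` are assumed to exist).

```ts
// Reduce all events recorded on the stream into the latest user state.
const user = await eventStore.reduce({ stream: userId, reducer });
if (user !== undefined) {
  console.log(user.name, user.email);
}
```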
### Aggregates
Event aggregates take an entity stream and reduce it to a desired state. They work on the same conceptual grounds as
the standard reducer, but resolve state onto an aggregate class instance instead of folding onto a plain state object.
The benefit of this is that we can create various helper methods on the aggregate that can help us navigate and
query the aggregated state.
```ts
import { AggregateRoot, makeAggregateReducer } from "@valkyr/event-store";
import type { EventRecord } from "./generated/events.ts";
import { eventStore } from "./event-store.ts";
export class User extends AggregateRoot<EventRecord> {
name!: Name;
email!: string;
// -------------------------------------------------------------------------
// Factories
// -------------------------------------------------------------------------
static #reducer = makeAggregateReducer(User, "user");
static async getById(userId: string): Promise<User | undefined> {
return eventStore.reduce({ stream: userId, reducer: this.#reducer });
}
// -------------------------------------------------------------------------
// Folder
// -------------------------------------------------------------------------
with(event: EventRecord) {
switch (event.type) {
case "user:created": {
this.name = event.data.name;
this.email = event.data.email;
break;
}
case "user:email-set": {
this.email = event.data.email;
break;
}
}
}
// -------------------------------------------------------------------------
// Utilities
// -------------------------------------------------------------------------
fullName() {
return `${this.name.given} ${this.name.family}`;
}
}
type Name = {
given: string;
family: string;
}
```
### Projectors
Projectors serve as a bridge between the write side and read side of your application. Think of them as event handlers
that listen for an event and create new read side records by pushing that data to one or more data stores or APIs
queried by your users.
A projector is registered for a specific event type and can have multiple handlers. Projectors come with three
different types of listeners: `once`, `on`, and `all`.
```ts
import { projector } from "./event-store.ts";
projector.on("user:created", async (record) => {
// do something with the event record ...
});
```
### Hydration in Event Processing
When handling events in a distributed system or during event replay operations, it is important to differentiate between **new events** and **rehydrated events**.
- **New Events (`hydrate: false`)**: These events are being processed for the first time. They will trigger all projection handlers, including `.once()`, `.on()`, and `.all()`.
- **Rehydrated Events (`hydrate: true`)**: These events are being replayed, either as part of a stream synchronization, system recovery, or reprocessing in a distributed environment. They **will not trigger** `.once()` handlers to avoid redundant side effects but will still be processed by `.on()` and `.all()` handlers where applicable.
This mechanism ensures that critical one-time operations (such as sending emails or initiating external API calls) are **not repeated** unnecessarily while still allowing stateful projections to update their read models correctly.
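As a sketch, the distinction surfaces when records are pushed through a projector instance; the flag names mirror the
`push` usage shown in the Postgres adapter README, and `projector` and `record` are assumed to exist.

```ts
// A new event arriving straight from the local event store.
await projector.push(record, { hydrated: false, outdated: false });

// A replayed event, e.g. during stream synchronization or recovery.
// `.once()` handlers are skipped; `.on()` and `.all()` still apply.
await projector.push(record, { hydrated: true, outdated: false });
```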
#### `.once("user:created", (event) => Promise<void>)`
This handler tells the projection that an event is only ever processed when it originates directly from the local
event store. This is a useful pattern when the event handler submits data to a third party service, such as sending
an email or placing third party orders. We disallow `hydrate` and `outdated` events as they represent events that
have already been processed.
#### `.on("user:created", (event) => Promise<void>)`
This method tells the projection to accept events directly from the event store as well as events coming through
hydration via sync, manual, or automatic stream rehydration operations. This is the default pattern used for most
events, and where you usually project the latest data to your read side models and data stores.
We allow `hydrate` events as they serve to keep the read side up to date with the latest events.
We disallow `outdated` events as we do not want the latest data to be overridden by outdated ones.
NOTE! The nature of this pattern means that outdated events are never run by this projection. Make sure to handle
`outdated` events if you have processing requirements that need to know about every event that has occurred in the
event stream.
#### `.all("user:created", (event) => Promise<void>)`
This method is a catch-all for events that do not fall under the stricter definitions of the `once` and `on` patterns.
It is a good place to deal with data that does not depend on a strict order of events.
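Putting the three listener types together, a sketch registering all of them for the same event type (handler bodies
are placeholders):

```ts
import { projector } from "./event-store.ts";

// Only runs for events originating from the local event store,
// e.g. one time side effects such as sending a welcome email.
projector.once("user:created", async (record) => {
  // send welcome email ...
});

// Runs for local and hydrated events; project the latest data to
// your read side models and data stores here.
projector.on("user:created", async (record) => {
  // upsert the read side user record ...
});

// Catch-all for order independent processing, including outdated events.
projector.all("user:created", async (record) => {
  // update order independent statistics ...
});
```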

adapters/browser/adapter.ts Normal file

@@ -0,0 +1,36 @@
import type { IndexedDatabase } from "@valkyr/db";
import { Event } from "../../libraries/event.ts";
import { EventStoreAdapter } from "../../types/adapter.ts";
import { Adapter, Collections, EventStoreDB, getEventStoreDatabase } from "./database.ts";
import { BrowserEventsProvider } from "./providers/events.ts";
import { BrowserRelationsProvider } from "./providers/relations.ts";
import { BrowserSnapshotsProvider } from "./providers/snapshots.ts";
/**
* A browser-based event store adapter that integrates database-specific providers.
*
* The `BrowserAdapter` enables event sourcing in a browser environment by utilizing
* IndexedDB for storage. It provides implementations for event storage, relations,
* and snapshots, allowing seamless integration with the shared event store interface.
*
* @template TEvent - The type of events managed by the event store.
*/
export class BrowserAdapter<const TEvent extends Event> implements EventStoreAdapter<EventStoreDB> {
readonly #database: IndexedDatabase<Collections>;
providers: EventStoreAdapter<TEvent>["providers"];
constructor(database: Adapter, name = "valkyr:event-store") {
this.#database = getEventStoreDatabase(name, database) as IndexedDatabase<Collections>;
this.providers = {
events: new BrowserEventsProvider(this.#database.collection("events")),
relations: new BrowserRelationsProvider(this.#database.collection("relations")),
snapshots: new BrowserSnapshotsProvider(this.#database.collection("snapshots")),
};
}
get db(): IndexedDatabase<Collections> {
return this.#database;
}
}
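A usage sketch for the adapter (database names are placeholders):

```ts
// Back the event store with IndexedDB in the browser.
const adapter = new BrowserAdapter("indexeddb");

// Or use the in-memory database, e.g. in tests.
const testAdapter = new BrowserAdapter("memorydb", "test:event-store");
```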

adapters/browser/database.ts Normal file

@@ -0,0 +1,73 @@
import { IndexedDatabase, MemoryDatabase } from "@valkyr/db";
import { EventRecord } from "../../libraries/event.ts";
export function getEventStoreDatabase(name: string, adapter: Adapter): EventStoreDB {
switch (adapter) {
case "indexeddb": {
return new IndexedDatabase<Collections>({
name,
version: 1,
registrars: [
{
name: "events",
indexes: [
["stream", { unique: false }],
["created", { unique: false }],
["recorded", { unique: false }],
],
},
{
name: "relations",
indexes: [
["key", { unique: false }],
["stream", { unique: false }],
],
},
{
name: "snapshots",
indexes: [
["name", { unique: false }],
["stream", { unique: false }],
["cursor", { unique: false }],
],
},
],
});
}
case "memorydb": {
return new MemoryDatabase<Collections>({
name,
registrars: [{ name: "events" }, { name: "relations" }, { name: "snapshots" }],
});
}
}
}
/*
|--------------------------------------------------------------------------------
| Types
|--------------------------------------------------------------------------------
*/
export type EventStoreDB = IndexedDatabase<Collections> | MemoryDatabase<Collections>;
export type Adapter = "indexeddb" | "memorydb";
export type Collections = {
events: EventRecord;
relations: Relation;
snapshots: Snapshot;
};
export type Relation = {
key: string;
stream: string;
};
export type Snapshot = {
name: string;
stream: string;
cursor: string;
state: Record<string, unknown>;
};

adapters/browser/providers/events.ts Normal file

@@ -0,0 +1,122 @@
import type { Collection } from "@valkyr/db";
import { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
export class BrowserEventsProvider implements EventsProvider {
constructor(readonly events: Collection<EventRecord>) {}
/**
* Insert a new event record to the events table.
*
* @param record - Event record to insert.
* @param tx - Transaction to insert the record within. (Optional)
*/
async insert(record: EventRecord): Promise<void> {
await this.events.insertOne(record);
}
/**
* Insert many new event records to the events table.
*
* @param records - Event records to insert.
* @param batchSize - Batch size for the insert loop.
*/
async insertMany(records: EventRecord[], batchSize: number = 1_000): Promise<void> {
for (let i = 0; i < records.length; i += batchSize) {
await this.events.insertMany(records.slice(i, i + batchSize));
}
}
/**
* Retrieve all the events in the events table. Optionally a cursor and direction
* can be provided to reduce the list of events returned.
*
* @param options - Find options.
*/
async get({ filter, cursor, direction }: EventReadOptions = {}): Promise<EventRecord[]> {
const query: any = {};
if (filter?.types !== undefined) {
withTypes(query, filter.types);
}
if (cursor !== undefined) {
withCursor(query, cursor, direction);
}
return (await this.events.find(query, { sort: { created: 1 } })) as EventRecord[];
}
/**
* Get events within the given stream.
*
* @param stream - Stream to fetch events for.
* @param options - Read options for modifying the result.
*/
async getByStream(stream: string, { filter, cursor, direction }: EventReadOptions = {}): Promise<EventRecord[]> {
const query: any = { stream };
if (filter?.types !== undefined) {
withTypes(query, filter.types);
}
if (cursor !== undefined) {
withCursor(query, cursor, direction);
}
return (await this.events.find(query, { sort: { created: 1 } })) as EventRecord[];
}
/**
* Get events within given list of streams.
*
* @param streams - Stream to get events for.
*/
async getByStreams(streams: string[], { filter, cursor, direction }: EventReadOptions = {}): Promise<EventRecord[]> {
const query: any = { stream: { $in: streams } };
if (filter?.types !== undefined) {
withTypes(query, filter.types);
}
if (cursor !== undefined) {
withCursor(query, cursor, direction ?? "asc");
}
return (await this.events.find(query, { sort: { created: 1 } })) as EventRecord[];
}
/**
* Get a single event by its id.
*
* @param id - Event id.
*/
async getById(id: string): Promise<EventRecord | undefined> {
return (await this.events.findById(id)) satisfies EventRecord | undefined;
}
/**
* Check if the given event is outdated in relation to the local event data.
*/
async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
const count = await this.events.count({
stream,
type,
created: {
$gt: created,
},
} as any);
return count > 0;
}
}
/*
|--------------------------------------------------------------------------------
| Query Builders
|--------------------------------------------------------------------------------
*/
function withTypes(filter: any, types: string[]): void {
filter.type = { $in: types };
}
function withCursor(filter: any, cursor: string, direction?: 1 | -1 | "asc" | "desc"): void {
if (cursor !== undefined) {
filter.created = {
[direction === "desc" || direction === -1 ? "$lt" : "$gt"]: cursor,
};
}
}

adapters/browser/providers/relations.ts Normal file

@@ -0,0 +1,109 @@
import type { Collection } from "@valkyr/db";
import type { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
export class BrowserRelationsProvider implements RelationsProvider {
constructor(readonly relations: Collection<Relation>) {}
/**
* Handle incoming relation operations.
*
* @param relations - List of relation operations to execute.
*/
async handle(relations: Relation[]): Promise<void> {
await Promise.all([
this.insertMany(relations.filter((relation) => relation.op === "insert")),
this.removeMany(relations.filter((relation) => relation.op === "remove")),
]);
}
/**
* Add stream to the relations table.
*
* @param key - Relational key to add stream to.
* @param stream - Stream to add to the key.
*/
async insert(key: string, stream: string): Promise<void> {
await this.relations.insertOne({ key, stream });
}
/**
* Add stream to many relational keys onto the relations table.
*
* @param relations - Relations to insert.
* @param batchSize - Batch size for the insert loop.
*/
async insertMany(relations: { key: string; stream: string }[], batchSize: number = 1_000): Promise<void> {
for (let i = 0; i < relations.length; i += batchSize) {
await this.relations.insertMany(relations.slice(i, i + batchSize).map(({ key, stream }) => ({ key, stream })));
}
}
/**
* Get a list of event streams registered under the given relational key.
*
* @param key - Relational key to get event streams for.
*/
async getByKey(key: string): Promise<string[]> {
return this.relations.find({ key }).then((relations) => relations.map(({ stream }) => stream));
}
/**
* Get a list of event streams registered under the given relational keys.
*
* @param keys - Relational keys to get event streams for.
*/
async getByKeys(keys: string[]): Promise<string[]> {
return this.relations.find({ key: { $in: keys } }).then((relations) => {
const streamIds = new Set<string>();
for (const relation of relations) {
streamIds.add(relation.stream);
}
return Array.from(streamIds);
});
}
/**
* Removes a stream from the relational table.
*
* @param key - Relational key to remove stream from.
* @param stream - Stream to remove from relation.
*/
async remove(key: string, stream: string): Promise<void> {
await this.relations.remove({ key, stream });
}
/**
* Removes multiple relational entries.
*
* @param relations - Relations to remove stream from.
* @param batchSize - Batch size for the insert loop.
*/
async removeMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
const promises = [];
for (let i = 0; i < relations.length; i += batchSize) {
for (const relation of relations.slice(i, i + batchSize)) {
promises.push(this.remove(relation.key, relation.stream));
}
}
await Promise.all(promises);
}
/**
* Remove all relations bound to the given relational keys.
*
* @param keys - Relational keys to remove from the relational table.
*/
async removeByKeys(keys: string[]): Promise<void> {
await this.relations.remove({ key: { $in: keys } });
}
/**
* Remove all relations bound to the given streams.
*
* @param streams - Streams to remove from the relational table.
*/
async removeByStreams(streams: string[]): Promise<void> {
await this.relations.remove({ stream: { $in: streams } });
}
}

adapters/browser/providers/snapshots.ts Normal file

@@ -0,0 +1,39 @@
import type { Collection } from "@valkyr/db";
import type { Snapshot, SnapshotsProvider } from "../../../types/adapter.ts";
export class BrowserSnapshotsProvider implements SnapshotsProvider {
constructor(readonly snapshots: Collection<Snapshot>) {}
/**
* Add snapshot state under given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream the snapshot is attached to.
* @param cursor - Cursor timestamp for the last event used in the snapshot.
* @param state - State of the reduced events.
*/
async insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void> {
await this.snapshots.insertOne({ name, stream, cursor, state });
}
/**
* Get snapshot state by stream.
*
* @param name - Name of the reducer which the state was created.
* @param stream - Stream the state was reduced for.
*/
async getByStream(name: string, stream: string): Promise<Snapshot | undefined> {
return this.snapshots.findOne({ name, stream });
}
/**
* Removes a snapshot for the given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream to remove from snapshots.
*/
async remove(name: string, stream: string): Promise<void> {
await this.snapshots.remove({ name, stream });
}
}

adapters/mongo/adapter.ts Normal file

@@ -0,0 +1,91 @@
import type { MongoConnectionUrl } from "@valkyr/testcontainers/mongodb";
import { Db, MongoClient } from "mongodb";
import { EventStoreAdapter } from "../../types/adapter.ts";
import { registrars } from "./collections/mod.ts";
import { MongoEventsProvider } from "./providers/events.ts";
import { MongoRelationsProvider } from "./providers/relations.ts";
import { MongoSnapshotsProvider } from "./providers/snapshots.ts";
import { DatabaseAccessor } from "./types.ts";
import { getCollectionsSet } from "./utilities.ts";
/**
* A server-based event store adapter that integrates database-specific providers.
*
* The `MongoAdapter` enables event sourcing in a back end environment by utilizing
* MongoDB for storage. It provides implementations for event storage, relations,
* and snapshots, allowing seamless integration with the shared event store interface.
*
* @template TEvent - The type of events managed by the event store.
*/
export class MongoAdapter implements EventStoreAdapter<DatabaseAccessor> {
readonly providers: {
readonly events: MongoEventsProvider;
readonly relations: MongoRelationsProvider;
readonly snapshots: MongoSnapshotsProvider;
};
readonly #accessor: DatabaseAccessor;
constructor(connection: MongoConnection, db: string) {
this.#accessor = getDatabaseAccessor(connection, db);
this.providers = {
events: new MongoEventsProvider(this.#accessor),
relations: new MongoRelationsProvider(this.#accessor),
snapshots: new MongoSnapshotsProvider(this.#accessor),
};
}
get db(): DatabaseAccessor {
return this.#accessor;
}
}
/**
* Takes a mongo database and registers the event store collections and
* indexes defined internally.
*
* @param db - Mongo database to register event store collections against.
* @param logger - Logger method to print internal logs.
*/
export async function register(db: Db, logger?: (...args: any[]) => any) {
const list = await getCollectionsSet(db);
for (const { name, indexes } of registrars) {
if (list.has(name)) {
continue;
}
await db.createCollection(name);
for (const [indexSpec, options] of indexes) {
await db.collection(name).createIndex(indexSpec, options);
logger?.("Mongo Event Store > Collection '%s' is indexed [%O] with options %O", name, indexSpec, options ?? {});
}
logger?.("Mongo Event Store > Collection '%s' is registered", name);
}
}
function getDatabaseAccessor(connection: MongoConnection, database: string): DatabaseAccessor {
let instance: Db | undefined;
return {
get db(): Db {
if (instance === undefined) {
instance = this.client.db(database);
}
return instance;
},
get client(): MongoClient {
if (typeof connection === "string") {
return new MongoClient(connection);
}
if (connection instanceof MongoClient) {
return connection;
}
return connection();
},
};
}
/**
* Connection types supported by the adapter; this can be a `url`, a `client` instance,
* or a lazy method that provides a `client` instance on demand.
*/
export type MongoConnection = MongoConnectionUrl | MongoClient | (() => MongoClient);
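A usage sketch for the adapter and the `register` helper (connection url and database name are placeholders):

```ts
import { MongoClient } from "mongodb";

const client = new MongoClient("mongodb://localhost:27017");

// Lazy connection form; the adapter resolves the client on demand.
const adapter = new MongoAdapter(() => client, "event-store");

// Ensure the event store collections and indexes are registered.
await register(client.db("event-store"), console.log);
```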

adapters/mongo/collections/events.ts Normal file

@@ -0,0 +1,49 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
export const registrar: CollectionRegistrar = {
name: "events",
indexes: [
[
{
stream: 1,
},
],
[
{
type: 1,
},
],
[
{
recorded: 1,
},
],
[
{
created: 1,
},
],
],
};
export const schema = z.object({
id: z.string(),
stream: z.string(),
type: z.string(),
data: z.any(),
meta: z.any(),
recorded: z.string(),
created: z.string(),
});
export type EventSchema = {
id: string;
stream: string;
type: string;
data: Record<string, any> | null;
meta: Record<string, any> | null;
recorded: string;
created: string;
};

adapters/mongo/collections/mod.ts Normal file

@@ -0,0 +1,10 @@
import { CollectionRegistrar } from "../types.ts";
import { registrar as events } from "./events.ts";
import { registrar as relations } from "./relations.ts";
import { registrar as snapshots } from "./snapshots.ts";
export const registrars: CollectionRegistrar[] = [
events,
relations,
snapshots,
];

adapters/mongo/collections/relations.ts Normal file

@@ -0,0 +1,38 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
export const registrar: CollectionRegistrar = {
name: "relations",
indexes: [
[
{
key: 1,
},
],
[
{
stream: 1,
},
],
[
{
key: 1,
stream: 1,
},
{
unique: true,
},
],
],
};
export const schema = z.object({
key: z.string(),
streams: z.string().array(),
});
export type RelationSchema = {
key: string;
streams: string[];
};

adapters/mongo/collections/snapshots.ts Normal file

@@ -0,0 +1,30 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
export const registrar: CollectionRegistrar = {
name: "snapshots",
indexes: [
[
{
name: 1,
stream: 1,
cursor: 1,
},
],
],
};
export const schema = z.object({
name: z.string(),
stream: z.string(),
cursor: z.string(),
state: z.record(z.string(), z.any()),
});
export type SnapshotSchema = {
name: string;
stream: string;
cursor: string;
state: Record<string, any>;
};

adapters/mongo/providers/events.ts Normal file

@@ -0,0 +1,131 @@
import type { Collection, FindCursor } from "mongodb";
import { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
import { type EventSchema, schema } from "../collections/events.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";
export class MongoEventsProvider implements EventsProvider {
readonly #accessor: DatabaseAccessor;
constructor(accessor: DatabaseAccessor) {
this.#accessor = accessor;
}
get collection(): Collection<EventSchema> {
return this.#accessor.db.collection<EventSchema>("events");
}
/**
* Insert a new event record to the events table.
*
* @param record - Event record to insert.
* @param tx - Transaction to insert the record within. (Optional)
*/
async insert(record: EventRecord): Promise<void> {
await this.collection.insertOne(record, { forceServerObjectId: true });
}
/**
* Insert many new event records to the events table.
*
* @param records - Event records to insert.
*/
async insertMany(records: EventRecord[]): Promise<void> {
await this.collection.insertMany(records, { forceServerObjectId: true });
}
/**
* Retrieve all the events in the events table. Optionally a cursor and direction
* can be provided to reduce the list of events returned.
*
* @param options - Find options.
*/
async get(options: EventReadOptions = {}): Promise<EventRecord[]> {
return (await this.#withReadOptions(this.collection.find(this.#withFilters(options)), options)
.sort({ created: 1 })
.toArray()
.then(toParsedRecords(schema))) as EventRecord[];
}
/**
* Get events within the given stream.
*
* @param stream - Stream to fetch events for.
* @param options - Read options for modifying the result.
*/
async getByStream(stream: string, options: EventReadOptions = {}): Promise<EventRecord[]> {
return (await this.#withReadOptions(this.collection.find({ stream, ...this.#withFilters(options) }), options)
.sort({ created: 1 })
.toArray()
.then(toParsedRecords(schema))) as EventRecord[];
}
/**
* Get events within given list of streams.
*
* @param streams - Stream to get events for.
* @param options - Read options for modifying the result.
*/
async getByStreams(streams: string[], options: EventReadOptions = {}): Promise<EventRecord[]> {
return (await this.#withReadOptions(this.collection.find({ stream: { $in: streams }, ...this.#withFilters(options) }), options)
.sort({ created: 1 })
.toArray()
.then(toParsedRecords(schema))) as EventRecord[];
}
/**
* Get a single event by its id.
*
* @param id - Event id.
*/
async getById(id: string): Promise<EventRecord | undefined> {
return (await this.collection.findOne({ id }).then(toParsedRecord(schema))) as EventRecord | undefined;
}
/**
* Check if the given event is outdated in relation to the local event data.
*
* @param event - Event record to check for outdated state for.
*/
async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
const count = await this.collection.countDocuments({
stream,
type,
created: {
$gt: created,
},
});
return count > 0;
}
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
#withFilters({ filter }: EventReadOptions): { type?: { $in: string[] } } {
const types = filter?.types;
if (types !== undefined) {
return { type: { $in: types } };
}
return {};
}
#withReadOptions(fc: FindCursor, { cursor, direction, limit }: EventReadOptions): FindCursor {
if (cursor !== undefined) {
if (direction === "desc" || direction === -1) {
fc.filter({ created: { $lt: cursor } });
} else {
fc.filter({ created: { $gt: cursor } });
}
}
if (limit !== undefined) {
fc.limit(limit);
}
return fc;
}
}

adapters/mongo/providers/relations.ts Normal file

@@ -0,0 +1,168 @@
import type { Collection } from "mongodb";
import { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
import { type RelationSchema, schema } from "../collections/relations.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";
export class MongoRelationsProvider implements RelationsProvider {
readonly #accessor: DatabaseAccessor;
constructor(accessor: DatabaseAccessor) {
this.#accessor = accessor;
}
get collection(): Collection<RelationSchema> {
return this.#accessor.db.collection<RelationSchema>("relations");
}
/**
* Handle incoming relation operations.
*
* @param relations - List of relation operations to execute.
*/
async handle(relations: Relation[]): Promise<void> {
await Promise.all([
this.insertMany(relations.filter((relation) => relation.op === "insert")),
this.removeMany(relations.filter((relation) => relation.op === "remove")),
]);
}
/**
* Add stream to the relations table.
*
* @param key - Relational key to add stream to.
* @param stream - Stream to add to the key.
*/
async insert(key: string, stream: string): Promise<void> {
await this.collection.updateOne({ key }, { $addToSet: { streams: stream } }, { upsert: true });
}
/**
* Add stream to many relational keys onto the relations table.
*
* @param relations - Relations to insert.
*/
async insertMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
const reduced = relations.reduce((map, { key, stream }) => {
if (map.has(key) === false) {
map.set(key, new Set<string>());
}
map.get(key)!.add(stream);
return map;
}, new Map<string, Set<string>>());
const bulkOps = [];
for (const [key, streams] of reduced) {
bulkOps.push({
updateOne: {
filter: { key },
update: { $addToSet: { streams: { $each: Array.from(streams) } } },
upsert: true,
},
});
}
for (let i = 0; i < bulkOps.length; i += batchSize) {
await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
}
}
/**
* Get a list of event streams registered under the given relational key.
*
* @param key - Relational key to get event streams for.
*/
async getByKey(key: string): Promise<string[]> {
const relations = await this.collection.findOne({ key }).then(toParsedRecord(schema));
if (relations === undefined) {
return [];
}
return relations.streams;
}
/**
* Get a list of event streams registered under the given relational keys.
*
* @param keys - Relational keys to get event streams for.
*/
async getByKeys(keys: string[]): Promise<string[]> {
const streams = new Set<string>();
const documents = await this.collection
.find({ key: { $in: keys } })
.toArray()
.then(toParsedRecords(schema));
documents.forEach((document) => {
for (const stream of document.streams) {
streams.add(stream);
}
});
return Array.from(streams);
}
/**
* Removes a stream from the relational table.
*
* @param key - Relational key to remove stream from.
* @param stream - Stream to remove from relation.
*/
async remove(key: string, stream: string): Promise<void> {
await this.collection.updateOne({ key }, { $pull: { streams: stream } });
}
/**
* Removes multiple relational entries.
*
* @param relations - Relations to remove stream from.
*/
async removeMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
const reduced = relations.reduce((map, { key, stream }) => {
if (!map.has(key)) {
map.set(key, new Set());
}
map.get(key)!.add(stream);
return map;
}, new Map<string, Set<string>>());
const bulkOps = [];
for (const [key, streams] of reduced) {
bulkOps.push({
updateOne: {
filter: { key },
update: { $pull: { streams: { $in: Array.from(streams) } } },
},
});
}
for (let i = 0; i < bulkOps.length; i += batchSize) {
await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
}
}
/**
* Remove all relations bound to the given relational keys.
*
* @param keys - Relational keys to remove from the relational table.
*/
async removeByKeys(keys: string[]): Promise<void> {
await this.collection.deleteMany({ key: { $in: keys } });
}
/**
* Remove all relations bound to the given streams.
*
* @param streams - Streams to remove from the relational table.
*/
async removeByStreams(streams: string[]): Promise<void> {
await this.collection.bulkWrite(
streams.map((stream) => ({
updateOne: {
filter: { streams: stream },
update: { $pull: { streams: stream } },
},
})),
);
}
}

adapters/mongo/providers/snapshots.ts Normal file

@@ -0,0 +1,50 @@
import type { Collection } from "mongodb";
import { SnapshotsProvider } from "../../../types/adapter.ts";
import { schema, type SnapshotSchema } from "../collections/snapshots.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord } from "../utilities.ts";
export class MongoSnapshotsProvider implements SnapshotsProvider {
readonly #accessor: DatabaseAccessor;
constructor(accessor: DatabaseAccessor) {
this.#accessor = accessor;
}
get collection(): Collection<SnapshotSchema> {
return this.#accessor.db.collection<SnapshotSchema>("snapshots");
}
/**
* Add snapshot state under given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream the snapshot is attached to.
* @param cursor - Cursor timestamp for the last event used in the snapshot.
* @param state - State of the reduced events.
*/
async insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void> {
// Scope the upsert to the (name, stream) pair so snapshots of the same reducer for other streams are preserved.
await this.collection.updateOne({ name, stream }, { $set: { cursor, state } }, { upsert: true });
}
/**
* Get snapshot state by stream.
*
* @param name - Name of the reducer which the state was created.
* @param stream - Stream the state was reduced for.
*/
async getByStream(name: string, stream: string): Promise<SnapshotSchema | undefined> {
return this.collection.findOne({ name, stream }).then(toParsedRecord(schema));
}
/**
* Removes a snapshot for the given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream to remove from snapshots.
*/
async remove(name: string, stream: string): Promise<void> {
await this.collection.deleteOne({ name, stream });
}
}

adapters/mongo/types.ts Normal file

@@ -0,0 +1,11 @@
import type { CreateIndexesOptions, Db, IndexSpecification, MongoClient } from "mongodb";
export type CollectionRegistrar = {
name: string;
indexes: [IndexSpecification, CreateIndexesOptions?][];
};
export type DatabaseAccessor = {
db: Db;
client: MongoClient;
};

adapters/mongo/utilities.ts Normal file

@@ -0,0 +1,43 @@
import type { Db, WithId } from "mongodb";
import type { z, ZodObject } from "zod";
/**
* Take a list of records and run it through the given zod parser. This
* ensures that all the documents in the resulting list adhere to the
* expected schematics before being returned. Any deviation in the list
* will result in an internal error being thrown.
*
* @param parser - Zod parser to run the documents through.
*/
export function toParsedRecords<TSchema extends ZodObject>(parser: TSchema): (documents: WithId<object>[]) => z.infer<TSchema>[] {
return parser.array().parse;
}
/**
* Take a single nullable document value and run it through the given zod
* parser. This ensures that the data adheres to the expected schematics
* before being returned. Any deviation in the expected response will result
* in an internal error being thrown.
*
* @param parser - Zod parser to run the document through.
*/
export function toParsedRecord<TSchema extends ZodObject>(parser: TSchema): (document: WithId<object> | null) => z.infer<TSchema> | undefined {
return function (document) {
if (document === null) {
return undefined;
}
return parser.parse(document);
};
}
/**
* Get a Set of collections that exists on a given mongo database instance.
*
* @param db - Mongo database to fetch collection list for.
*/
export async function getCollectionsSet(db: Db) {
return db
.listCollections()
.toArray()
.then((collections) => new Set(collections.map((c) => c.name)));
}

adapters/postgres/README.md Normal file

@@ -0,0 +1,102 @@
<p align="center">
<img src="https://user-images.githubusercontent.com/1998130/229430454-ca0f2811-d874-4314-b13d-c558de8eec7e.svg" />
</p>
# Postgres Adapter
The following instructions aim to guide you through setting up @valkyr/event-store with a postgres database.
## Event Store
Once we have defined our configs and printed our events we create a new postgres event store instance.
```ts
import { makePostgresEventStore } from "@valkyr/event-store/postgres";
import { Projector } from "@valkyr/event-store"; // assumed export for the projector instance used below
import postgres from "postgres";
import { type Event, type EventRecord, events, validators } from "./generated/events.ts";
export const eventStore = makePostgresEventStore<Event>({
connection: () => postgres("postgres://user:pass@localhost:5432/app"), // lazy loaded connection; url parts are placeholders
schema: "event_store",
events,
validators,
hooks: {
async onError(error) {
// when the event store throws unhandled errors they will end up in
// this hook and can be logged with the system's own logger. If the
// onError hook is not provided, all unhandled errors are logged
// through the `console.error` method.
},
},
});
const projector = new Projector<EventRecord>();
eventStore.onEventsInserted(async (records, { batch }) => {
// trigger event side effects here such as sending the records through
// an event messaging system or other projection patterns
// ### Projector
// The following is an example of pushing records to the projector
// instance provided by this library.
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
```
## Migrations
We do not manage migrations in your local solution, so we provide a sample SQL script for an optimal query setup. The following example assumes all event store tables go into an `event_store` schema. If you are adding these tables to a different schema, or into the default public postgres space, you will need to modify this sample accordingly.
```sql
CREATE SCHEMA "event_store";
-- Event Table
CREATE TABLE IF NOT EXISTS "event_store"."events" (
"id" varchar PRIMARY KEY NOT NULL,
"stream" varchar NOT NULL,
"type" varchar NOT NULL,
"data" jsonb NOT NULL,
"meta" jsonb NOT NULL,
"recorded" varchar NOT NULL,
"created" varchar NOT NULL
);
CREATE INDEX IF NOT EXISTS "events_stream_index" ON "event_store"."events" USING btree ("stream");
CREATE INDEX IF NOT EXISTS "events_type_index" ON "event_store"."events" USING btree ("type");
CREATE INDEX IF NOT EXISTS "events_recorded_index" ON "event_store"."events" USING btree ("recorded");
CREATE INDEX IF NOT EXISTS "events_created_index" ON "event_store"."events" USING btree ("created");
-- Relations Table
CREATE TABLE IF NOT EXISTS "event_store"."relations" (
"id" serial PRIMARY KEY NOT NULL,
"key" varchar NOT NULL,
"stream" varchar NOT NULL,
UNIQUE ("key", "stream")
);
CREATE INDEX IF NOT EXISTS "relations_key_index" ON "event_store"."relations" USING btree ("key");
CREATE INDEX IF NOT EXISTS "relations_stream_index" ON "event_store"."relations" USING btree ("stream");
-- Snapshots Table
CREATE TABLE IF NOT EXISTS "event_store"."snapshots" (
"id" serial PRIMARY KEY NOT NULL,
"name" varchar NOT NULL,
"stream" varchar NOT NULL,
"cursor" varchar NOT NULL,
"state" jsonb NOT NULL,
UNIQUE ("name", "stream")
);
CREATE INDEX IF NOT EXISTS "snapshots_name_stream_cursor_index" ON "event_store"."snapshots" USING btree ("name","stream","cursor");
```
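The script can be applied with any migration tool. As a sketch, postgres.js can execute it directly from a file (the
connection url and file path are placeholders):

```ts
import postgres from "postgres";

const sql = postgres("postgres://user:pass@localhost:5432/app");

// Run the migration script above, e.g. during application bootstrap.
await sql.file("./migrations/001-event-store.sql");
await sql.end();
```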

adapters/postgres/adapter.ts Normal file

@@ -0,0 +1,45 @@
import { EventStoreAdapter } from "../../types/adapter.ts";
import { PostgresConnection } from "./connection.ts";
import { PostgresDatabase } from "./database.ts";
import { PostgresEventsProvider } from "./providers/event.ts";
import { PostgresRelationsProvider } from "./providers/relations.ts";
import { PostgresSnapshotsProvider } from "./providers/snapshot.ts";
/**
* A server-based event store adapter that integrates database-specific providers.
*
* The `PostgresAdapter` enables event sourcing in a back end environment by utilizing
* PostgreSQL for storage. It provides implementations for event storage, relations,
* and snapshots, allowing seamless integration with the shared event store interface.
*
* @template TEvent - The type of events managed by the event store.
*/
export class PostgresAdapter implements EventStoreAdapter<PostgresDatabase> {
readonly providers: {
readonly events: PostgresEventsProvider;
readonly relations: PostgresRelationsProvider;
readonly snapshots: PostgresSnapshotsProvider;
};
#database: PostgresDatabase;
constructor(
readonly connection: PostgresConnection,
readonly options: Options = {},
) {
this.#database = new PostgresDatabase(connection);
this.providers = {
events: new PostgresEventsProvider(this.#database, options.schema),
relations: new PostgresRelationsProvider(this.#database, options.schema),
snapshots: new PostgresSnapshotsProvider(this.#database, options.schema),
};
}
get db(): PostgresDatabase {
return this.#database;
}
}
type Options = {
schema?: string;
};

adapters/postgres/connection.ts Normal file

@@ -0,0 +1,7 @@
import type { Options, Sql } from "postgres";
export type PostgresConnection = [PostgresConnectionUrl, Options<any>?] | [Options<any>] | Sql | PostgresConnectionFactory;
type PostgresConnectionUrl = `postgres://${string}:${string}@${string}:${number}/${string}`;
type PostgresConnectionFactory = () => Sql;
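A sketch of the accepted connection forms as consumed by the `PostgresAdapter` (urls and options are placeholders):

```ts
import postgres from "postgres";

import { PostgresAdapter } from "@valkyr/event-store/postgres";

// 1. Connection url tuple, with optional postgres.js options.
const fromUrl = new PostgresAdapter(["postgres://user:pass@localhost:5432/app", { max: 10 }]);

// 2. Options object only.
const fromOptions = new PostgresAdapter([{ host: "localhost", database: "app" }]);

// 3. An existing Sql instance, or a lazy factory resolving one on demand.
const fromSql = new PostgresAdapter(postgres("postgres://user:pass@localhost:5432/app"));
const fromFactory = new PostgresAdapter(() => postgres("postgres://user:pass@localhost:5432/app"));
```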

adapters/postgres/database.ts Normal file

@@ -0,0 +1,36 @@
import postgres, { type Sql } from "postgres";
import { PostgresConnection } from "./connection.ts";
export class PostgresDatabase {
readonly #connection: PostgresConnection;
#sql?: Sql;
constructor(connection: PostgresConnection) {
this.#connection = connection;
}
get sql(): Sql {
if (this.#sql === undefined) {
const connection = this.#connection;
if (Array.isArray(connection)) {
const [urlOrOptions, option] = connection;
if (typeof urlOrOptions === "string") {
this.#sql = postgres(urlOrOptions, option);
} else {
this.#sql = postgres(urlOrOptions);
}
} else if ("options" in connection) {
this.#sql = connection;
} else {
this.#sql = connection();
}
}
return this.#sql;
}
}
export type DatabaseAccessor = {
sql: Sql;
};

adapters/postgres/providers/event.ts Normal file

@@ -0,0 +1,175 @@
import type { Helper } from "postgres";
import type { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
import type { PostgresDatabase } from "../database.ts";
type PGEventRecord = Omit<EventRecord, "data" | "meta"> & { data: string; meta: string };
export class PostgresEventsProvider implements EventsProvider {
constructor(
readonly db: PostgresDatabase,
readonly schema?: string,
) {}
get table(): Helper<string, []> {
if (this.schema !== undefined) {
return this.db.sql(`${this.schema}.events`);
}
return this.db.sql("public.events");
}
/**
* Insert a new event record to the events table.
*
* @param record - Event record to insert.
*/
async insert(record: EventRecord): Promise<void> {
await this.db.sql`INSERT INTO ${this.table} ${this.db.sql(this.#toDriver(record))}`.catch((error) => {
throw new Error(`EventStore > 'events.insert' failed with postgres error: ${error.message}`);
});
}
/**
* Insert many new event records to the events table.
*
* @param records - Event records to insert.
* @param batchSize - Batch size for the insert loop.
*/
async insertMany(records: EventRecord[], batchSize: number = 1_000): Promise<void> {
await this.db.sql
.begin(async (sql) => {
for (let i = 0; i < records.length; i += batchSize) {
await sql`INSERT INTO ${this.table} ${this.db.sql(records.slice(i, i + batchSize).map(this.#toDriver))}`;
}
})
.catch((error) => {
throw new Error(`EventStore > 'events.insertMany' failed with postgres error: ${error.message}`);
});
}
/**
* Retrieve all the events in the events table. Optionally a cursor and direction
* can be provided to reduce the list of events returned.
*
* @param options - Find options.
*/
async get({ filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
// 'WHERE TRUE' anchors the optional 'AND ...' fragments appended below,
// keeping the query valid when no filter or cursor is provided.
return this.db.sql<PGEventRecord[]>`
SELECT * FROM ${this.table}
WHERE
TRUE
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
ORDER BY created ASC
${limit ? this.#withLimit(limit) : this.db.sql``}
`.then(this.#fromDriver);
}
/**
* Get events within the given stream.
*
* @param stream - Stream to fetch events for.
* @param options - Read options for modifying the result.
*/
async getByStream(stream: string, { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
return this.db.sql<PGEventRecord[]>`
SELECT * FROM ${this.table}
WHERE
stream = ${stream}
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
ORDER BY created ASC
${limit ? this.#withLimit(limit) : this.db.sql``}
`.then(this.#fromDriver);
}
/**
* Get events within given list of streams.
*
* @param streams - Stream to get events for.
* @param options - Read options for modifying the result.
*/
async getByStreams(streams: string[], { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
return this.db.sql<PGEventRecord[]>`
SELECT * FROM ${this.table}
WHERE
stream IN ${this.db.sql(streams)}
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
ORDER BY created ASC
${limit ? this.#withLimit(limit) : this.db.sql``}
`.then(this.#fromDriver);
}
/**
* Get a single event by its id.
*
* @param id - Event id.
*/
async getById(id: string): Promise<EventRecord | undefined> {
return this.db.sql<PGEventRecord[]>`SELECT * FROM ${this.table} WHERE id = ${id}`.then(this.#fromDriver).then(([record]) => record);
}
/**
* Check if the given event is outdated in relation to the local event data.
*/
async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
const count = await this.db.sql`
SELECT COUNT(*) AS count
FROM ${this.table}
WHERE
stream = ${stream}
AND type = ${type}
AND created > ${created}
`.then((result: any) => Number(result[0].count));
return count > 0;
}
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
#withTypes(types: string[]) {
return this.db.sql`AND type IN ${this.db.sql(types)}`;
}
#withCursor(cursor: string, direction?: 1 | -1 | "asc" | "desc") {
if (direction === "desc" || direction === -1) {
return this.db.sql`AND created < ${cursor}`;
}
return this.db.sql`AND created > ${cursor}`;
}
#withLimit(limit: number) {
return this.db.sql`LIMIT ${limit}`;
}
/*
|--------------------------------------------------------------------------------
| Parsers
|--------------------------------------------------------------------------------
*/
#fromDriver(records: PGEventRecord[]): EventRecord[] {
return records.map((record) => {
record.data = typeof record.data === "string" ? JSON.parse(record.data) : record.data;
record.meta = typeof record.meta === "string" ? JSON.parse(record.meta) : record.meta;
return record as unknown as EventRecord;
});
}
#toDriver(record: EventRecord): PGEventRecord {
return {
...record,
data: JSON.stringify(record.data),
meta: JSON.stringify(record.meta),
};
}
}

adapters/postgres/providers/relations.ts Normal file

@@ -0,0 +1,140 @@
import type { Helper } from "postgres";
import type { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
import type { PostgresDatabase } from "../database.ts";
export class PostgresRelationsProvider implements RelationsProvider {
constructor(
readonly db: PostgresDatabase,
readonly schema?: string,
) {}
get table(): Helper<string, []> {
if (this.schema !== undefined) {
return this.db.sql(`${this.schema}.relations`);
}
return this.db.sql("public.relations");
}
/**
* Handle incoming relation operations.
*
* @param relations - List of relation operations to execute.
*/
async handle(relations: Relation[]): Promise<void> {
await Promise.all([
this.insertMany(relations.filter((relation) => relation.op === "insert")),
this.removeMany(relations.filter((relation) => relation.op === "remove")),
]);
}
/**
* Add stream to the relations table.
*
* @param key - Relational key to add stream to.
* @param stream - Stream to add to the key.
*/
async insert(key: string, stream: string): Promise<void> {
await this.db.sql`INSERT INTO ${this.table} (key, stream) VALUES (${key}, ${stream}) ON CONFLICT DO NOTHING`.catch((error) => {
throw new Error(`EventStore > 'relations.insert' failed with postgres error: ${error.message}`);
});
}
/**
* Add stream to many relational keys onto the relations table.
*
* @param relations - Relations to insert.
* @param batchSize - Batch size for the insert loop.
*/
async insertMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
await this.db.sql
.begin(async (sql) => {
for (let i = 0; i < relations.length; i += batchSize) {
const values = relations.slice(i, i + batchSize).map(({ key, stream }) => [key, stream]);
await sql`INSERT INTO ${this.table} (key, stream) VALUES ${sql(values)} ON CONFLICT DO NOTHING`;
}
})
.catch((error) => {
throw new Error(`EventStore > 'relations.insertMany' failed with postgres error: ${error.message}`);
});
}
/**
* Get a list of event streams registered under the given relational key.
*
* @param key - Relational key to get event streams for.
*/
async getByKey(key: string): Promise<string[]> {
return this.db.sql`SELECT stream FROM ${this.table} WHERE key = ${key}`
.then((rows) => rows.map(({ stream }) => stream))
.catch((error) => {
throw new Error(`EventStore > 'relations.getByKey' failed with postgres error: ${error.message}`);
});
}
/**
* Get a list of event streams registered under the given relational keys.
*
* @param keys - Relational keys to get event streams for.
*/
async getByKeys(keys: string[]): Promise<string[]> {
return this.db.sql`SELECT DISTINCT stream FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`
.then((rows) => rows.map(({ stream }) => stream))
.catch((error) => {
throw new Error(`EventStore > 'relations.getByKeys' failed with postgres error: ${error.message}`);
});
}
/**
* Removes a stream from the relational table.
*
* @param key - Relational key to remove stream from.
* @param stream - Stream to remove from relation.
*/
async remove(key: string, stream: string): Promise<void> {
await this.db.sql`DELETE FROM ${this.table} WHERE key = ${key} AND stream = ${stream}`.catch((error) => {
throw new Error(`EventStore > 'relations.remove' failed with postgres error: ${error.message}`);
});
}
/**
* Removes multiple relational entries.
*
* @param relations - Relations to remove stream from.
* @param batchSize - Batch size for the insert loop.
*/
async removeMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
await this.db.sql
.begin(async (sql) => {
for (let i = 0; i < relations.length; i += batchSize) {
const conditions = relations.slice(i, i + batchSize).map(({ key, stream }) => `(key = '${key}' AND stream = '${stream}')`);
await sql`DELETE FROM ${this.table} WHERE ${this.db.sql.unsafe(conditions.join(" OR "))}`;
}
})
.catch((error) => {
throw new Error(`EventStore > 'relations.removeMany' failed with postgres error: ${error.message}`);
});
}
/**
* Remove all relations bound to the given relational keys.
*
* @param keys - Relational keys to remove from the relational table.
*/
async removeByKeys(keys: string[]): Promise<void> {
await this.db.sql`DELETE FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`.catch((error) => {
throw new Error(`EventStore > 'relations.removeByKeys' failed with postgres error: ${error.message}`);
});
}
/**
* Remove all relations bound to the given streams.
*
* @param streams - Streams to remove from the relational table.
*/
async removeByStreams(streams: string[]): Promise<void> {
await this.db.sql`DELETE FROM ${this.table} WHERE stream IN ${this.db.sql(streams)}`.catch((error) => {
throw new Error(`EventStore > 'relations.removeByStreams' failed with postgres error: ${error.message}`);
});
}
}

adapters/postgres/providers/snapshot.ts Normal file

@@ -0,0 +1,82 @@
import type { Helper } from "postgres";
import type { Snapshot, SnapshotsProvider } from "../../../types/adapter.ts";
import type { PostgresDatabase } from "../database.ts";
type PGSnapshot = Omit<Snapshot, "state"> & { state: string };
export class PostgresSnapshotsProvider implements SnapshotsProvider {
constructor(
readonly db: PostgresDatabase,
readonly schema?: string,
) {}
get table(): Helper<string, []> {
if (this.schema !== undefined) {
return this.db.sql(`${this.schema}.snapshots`);
}
return this.db.sql("public.snapshots");
}
/**
* Add snapshot state under given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream the snapshot is attached to.
* @param cursor - Cursor timestamp for the last event used in the snapshot.
* @param state - State of the reduced events.
*/
async insert(name: string, stream: string, cursor: string, state: any): Promise<void> {
// Upsert so a repeated snapshot of the same reducer stream replaces the
// previous state instead of violating the (name, stream) unique constraint.
await this.db.sql`
INSERT INTO ${this.table} ${this.db.sql(this.#toDriver({ name, stream, cursor, state }))}
ON CONFLICT (name, stream) DO UPDATE SET cursor = EXCLUDED.cursor, state = EXCLUDED.state`.catch((error) => {
throw new Error(`EventStore > 'snapshots.insert' failed with postgres error: ${error.message}`);
});
}
/**
* Get snapshot state by stream.
*
* @param name - Name of the reducer which the state was created.
* @param stream - Stream the state was reduced for.
*/
async getByStream(name: string, stream: string): Promise<Snapshot | undefined> {
return this.db.sql<PGSnapshot[]>`SELECT * FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`
.then(this.#fromDriver)
.then(([snapshot]) => snapshot)
.catch((error) => {
throw new Error(`EventStore > 'snapshots.getByStream' failed with postgres error: ${error.message}`);
});
}
/**
* Removes a snapshot for the given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream to remove from snapshots.
*/
async remove(name: string, stream: string): Promise<void> {
await this.db.sql`DELETE FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`.catch((error) => {
throw new Error(`EventStore > 'snapshots.remove' failed with postgres error: ${error.message}`);
});
}
/*
|--------------------------------------------------------------------------------
| Parsers
|--------------------------------------------------------------------------------
*/
#fromDriver(snapshots: PGSnapshot[]): Snapshot[] {
return snapshots.map((snapshot) => {
snapshot.state = typeof snapshot.state === "string" ? JSON.parse(snapshot.state) : snapshot.state;
return snapshot as unknown as Snapshot;
});
}
#toDriver(snapshot: Snapshot): object {
return {
...snapshot,
state: JSON.stringify(snapshot.state),
};
}
}
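A minimal sketch of the provider above in use, showing the JSON round-trip performed by `#toDriver` and `#fromDriver`; `db` is an assumed `PostgresDatabase` instance and the schema name is illustrative:

```ts
// Illustrative wiring; `db` is assumed to be a configured PostgresDatabase.
const snapshots = new PostgresSnapshotsProvider(db, "event_store");

// State is serialized to a string column on insert (#toDriver) ...
await snapshots.insert("user:reducer", "stream-1", "jxubdY-0", { name: "Jane" });

// ... and parsed back into an object on read (#fromDriver).
const snapshot = await snapshots.getByStream("user:reducer", "stream-1");
// snapshot?.state -> { name: "Jane" }
```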

27
deno.json Normal file
View File

@@ -0,0 +1,27 @@
{
"name": "@valkyr/event-store",
"version": "2.0.0",
"exports": {
".": "./mod.ts",
"./browser": "./adapters/browser/adapter.ts",
"./mongo": "./adapters/mongo/adapter.ts",
"./postgres": "./adapters/postgres/adapter.ts",
"./printer": "./printers/printer.ts"
},
"publish": {
"exclude": [
".github",
".vscode",
".gitignore",
"tests"
]
},
"tasks": {
"check": "deno check ./mod.ts",
"lint": "npx eslint -c eslint.config.mjs .",
"test": "deno test --allow-all",
"test:publish": "deno publish --dry-run",
"ncu": "npx ncu -u -p npm"
},
"nodeModulesDir": "auto"
}
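The `exports` map above defines the package entry points; a sketch of how consumers would typically resolve them (a namespace import is used for the adapter since that module's export names are not shown in this diff):

```ts
// "." resolves to ./mod.ts
import { EventStore } from "@valkyr/event-store";

// "./postgres" resolves to ./adapters/postgres/adapter.ts; imported as a
// namespace because the module's exported names are not shown here.
import * as postgres from "@valkyr/event-store/postgres";
```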

871
deno.lock generated Normal file
View File

@@ -0,0 +1,871 @@
{
"version": "4",
"specifiers": {
"npm:@jsr/std__assert@1.0.12": "1.0.12",
"npm:@jsr/std__async@1.0.12": "1.0.12",
"npm:@jsr/std__testing@1.0.11": "1.0.11",
"npm:@jsr/valkyr__testcontainers@2.0.0": "2.0.0",
"npm:@valkyr/db@1.0.1": "1.0.1",
"npm:eslint-plugin-simple-import-sort@12.1.1": "12.1.1_eslint@9.24.0",
"npm:eslint@9.24.0": "9.24.0",
"npm:fake-indexeddb@6.0.0": "6.0.0",
"npm:mongodb@6.15.0": "6.15.0",
"npm:nanoid@5.1.5": "5.1.5",
"npm:postgres@3.4.5": "3.4.5",
"npm:postgres@^3.4.5": "3.4.5",
"npm:prettier@3.5.3": "3.5.3",
"npm:typescript-eslint@8.30.1": "8.30.1_eslint@9.24.0_typescript@5.8.3_@typescript-eslint+parser@8.30.1__eslint@9.24.0__typescript@5.8.3",
"npm:zod@next": "4.0.0-beta.20250420T053007"
},
"npm": {
"@eslint-community/eslint-utils@4.6.1_eslint@9.24.0": {
"integrity": "sha512-KTsJMmobmbrFLe3LDh0PC2FXpcSYJt/MLjlkh/9LEnmKYLSYmT/0EW9JWANjeoemiuZrmogti0tW5Ch+qNUYDw==",
"dependencies": [
"eslint",
"eslint-visitor-keys@3.4.3"
]
},
"@eslint-community/regexpp@4.12.1": {
"integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="
},
"@eslint/config-array@0.20.0": {
"integrity": "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==",
"dependencies": [
"@eslint/object-schema",
"debug",
"minimatch@3.1.2"
]
},
"@eslint/config-helpers@0.2.1": {
"integrity": "sha512-RI17tsD2frtDu/3dmI7QRrD4bedNKPM08ziRYaC5AhkGrzIAJelm9kJU1TznK+apx6V+cqRz8tfpEeG3oIyjxw=="
},
"@eslint/core@0.12.0": {
"integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==",
"dependencies": [
"@types/json-schema"
]
},
"@eslint/core@0.13.0": {
"integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==",
"dependencies": [
"@types/json-schema"
]
},
"@eslint/eslintrc@3.3.1": {
"integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==",
"dependencies": [
"ajv",
"debug",
"espree",
"globals",
"ignore",
"import-fresh",
"js-yaml",
"minimatch@3.1.2",
"strip-json-comments"
]
},
"@eslint/js@9.24.0": {
"integrity": "sha512-uIY/y3z0uvOGX8cp1C2fiC4+ZmBhp6yZWkojtHL1YEMnRt1Y63HB9TM17proGEmeG7HeUY+UP36F0aknKYTpYA=="
},
"@eslint/object-schema@2.1.6": {
"integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="
},
"@eslint/plugin-kit@0.2.8": {
"integrity": "sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==",
"dependencies": [
"@eslint/core@0.13.0",
"levn"
]
},
"@humanfs/core@0.19.1": {
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="
},
"@humanfs/node@0.16.6": {
"integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
"dependencies": [
"@humanfs/core",
"@humanwhocodes/retry@0.3.1"
]
},
"@humanwhocodes/module-importer@1.0.1": {
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="
},
"@humanwhocodes/retry@0.3.1": {
"integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="
},
"@humanwhocodes/retry@0.4.2": {
"integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ=="
},
"@jsr/std__assert@1.0.12": {
"integrity": "sha512-9pmgjJhuljZCmLlbvsRV6aLT5+YCmhX/yIjaWYav7R7Vup2DOLAgpUOs4JkzRbwn7fdKYrwHT8+DjqPr7Ti8mg==",
"dependencies": [
"@jsr/std__internal"
]
},
"@jsr/std__async@1.0.12": {
"integrity": "sha512-NUaSOcwMetVeVkIqet2Ammy2A5YxG8ViFxryBbTaC4h7l/cgAkU59U3zF58ek4Y8HZ0Nx5De7qBptPfp62kcgw=="
},
"@jsr/std__data-structures@1.0.6": {
"integrity": "sha512-Ejc8mHLuoYxXLu2zPquvqijdgQ19OV+1DdVDrLc/Cg+tiuGh4Dq2FSnLiPINh4lO1AJ3XcZcYPx38RxdsZcCOg=="
},
"@jsr/std__fs@1.0.16": {
"integrity": "sha512-xnqp8XqEFN+ttkERg9GG+AxyipSd+rfCquLPviF5ZSwN6oCV1TM0ZNoKHXNk/EJAsz28YjF4sfgdJt8XwTV2UQ==",
"dependencies": [
"@jsr/std__path"
]
},
"@jsr/std__internal@1.0.6": {
"integrity": "sha512-1NLtCx9XAL44nt56gzmRSCgXjIthHVzK62fTkJdq8/XsP7eN9a21AZDpc0EGJ/cgvmmOB52UGh46OuKrrY7eVg=="
},
"@jsr/std__net@1.0.4": {
"integrity": "sha512-KJGU8ZpQ70sMW2Zk+wU3wFUkggS9lTLfRFBygnV9VaK8KI+1ggiqtB06rH4a14CNRGM9y46Mn/ZCbQUd4Q45Jg=="
},
"@jsr/std__path@1.0.8": {
"integrity": "sha512-eNBGlh/8ZVkMxtFH4bwIzlAeKoHYk5in4wrBZhi20zMdOiuX4QozP4+19mIXBT2lzHDjhuVLyECbhFeR304iDg=="
},
"@jsr/std__testing@1.0.11": {
"integrity": "sha512-pqQDYtIsaDf+x4NHQ+WiixRJ8DfhgFQRdlHWWssFAzIYwleR+VHLTNlgsgg+AH3mIIR+gTkBmKk21hTkM/WbMQ==",
"dependencies": [
"@jsr/std__assert",
"@jsr/std__async",
"@jsr/std__data-structures",
"@jsr/std__fs",
"@jsr/std__internal",
"@jsr/std__path"
]
},
"@jsr/valkyr__testcontainers@2.0.0": {
"integrity": "sha512-aK78hRoVyQm3M0aFucuUV7Ghfx4295fJ6Q3fSjtJizYnu10VuktKfcIh5xHhOVAISk1Zh0y3SYGIiuZiKr57vw==",
"dependencies": [
"@jsr/std__async",
"@jsr/std__fs",
"@jsr/std__net",
"mongodb",
"postgres"
]
},
"@mongodb-js/saslprep@1.2.2": {
"integrity": "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA==",
"dependencies": [
"sparse-bitfield"
]
},
"@nodelib/fs.scandir@2.1.5": {
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dependencies": [
"@nodelib/fs.stat",
"run-parallel"
]
},
"@nodelib/fs.stat@2.0.5": {
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="
},
"@nodelib/fs.walk@1.2.8": {
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dependencies": [
"@nodelib/fs.scandir",
"fastq"
]
},
"@types/estree@1.0.7": {
"integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="
},
"@types/json-schema@7.0.15": {
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="
},
"@types/webidl-conversions@7.0.3": {
"integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="
},
"@types/whatwg-url@11.0.5": {
"integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==",
"dependencies": [
"@types/webidl-conversions"
]
},
"@typescript-eslint/eslint-plugin@8.30.1_@typescript-eslint+parser@8.30.1__eslint@9.24.0__typescript@5.8.3_eslint@9.24.0_typescript@5.8.3": {
"integrity": "sha512-v+VWphxMjn+1t48/jO4t950D6KR8JaJuNXzi33Ve6P8sEmPr5k6CEXjdGwT6+LodVnEa91EQCtwjWNUCPweo+Q==",
"dependencies": [
"@eslint-community/regexpp",
"@typescript-eslint/parser",
"@typescript-eslint/scope-manager",
"@typescript-eslint/type-utils",
"@typescript-eslint/utils",
"@typescript-eslint/visitor-keys",
"eslint",
"graphemer",
"ignore",
"natural-compare",
"ts-api-utils",
"typescript"
]
},
"@typescript-eslint/parser@8.30.1_eslint@9.24.0_typescript@5.8.3": {
"integrity": "sha512-H+vqmWwT5xoNrXqWs/fesmssOW70gxFlgcMlYcBaWNPIEWDgLa4W9nkSPmhuOgLnXq9QYgkZ31fhDyLhleCsAg==",
"dependencies": [
"@typescript-eslint/scope-manager",
"@typescript-eslint/types",
"@typescript-eslint/typescript-estree",
"@typescript-eslint/visitor-keys",
"debug",
"eslint",
"typescript"
]
},
"@typescript-eslint/scope-manager@8.30.1": {
"integrity": "sha512-+C0B6ChFXZkuaNDl73FJxRYT0G7ufVPOSQkqkpM/U198wUwUFOtgo1k/QzFh1KjpBitaK7R1tgjVz6o9HmsRPg==",
"dependencies": [
"@typescript-eslint/types",
"@typescript-eslint/visitor-keys"
]
},
"@typescript-eslint/type-utils@8.30.1_eslint@9.24.0_typescript@5.8.3": {
"integrity": "sha512-64uBF76bfQiJyHgZISC7vcNz3adqQKIccVoKubyQcOnNcdJBvYOILV1v22Qhsw3tw3VQu5ll8ND6hycgAR5fEA==",
"dependencies": [
"@typescript-eslint/typescript-estree",
"@typescript-eslint/utils",
"debug",
"eslint",
"ts-api-utils",
"typescript"
]
},
"@typescript-eslint/types@8.30.1": {
"integrity": "sha512-81KawPfkuulyWo5QdyG/LOKbspyyiW+p4vpn4bYO7DM/hZImlVnFwrpCTnmNMOt8CvLRr5ojI9nU1Ekpw4RcEw=="
},
"@typescript-eslint/typescript-estree@8.30.1_typescript@5.8.3": {
"integrity": "sha512-kQQnxymiUy9tTb1F2uep9W6aBiYODgq5EMSk6Nxh4Z+BDUoYUSa029ISs5zTzKBFnexQEh71KqwjKnRz58lusQ==",
"dependencies": [
"@typescript-eslint/types",
"@typescript-eslint/visitor-keys",
"debug",
"fast-glob",
"is-glob",
"minimatch@9.0.5",
"semver",
"ts-api-utils",
"typescript"
]
},
"@typescript-eslint/utils@8.30.1_eslint@9.24.0_typescript@5.8.3": {
"integrity": "sha512-T/8q4R9En2tcEsWPQgB5BQ0XJVOtfARcUvOa8yJP3fh9M/mXraLxZrkCfGb6ChrO/V3W+Xbd04RacUEqk1CFEQ==",
"dependencies": [
"@eslint-community/eslint-utils",
"@typescript-eslint/scope-manager",
"@typescript-eslint/types",
"@typescript-eslint/typescript-estree",
"eslint",
"typescript"
]
},
"@typescript-eslint/visitor-keys@8.30.1": {
"integrity": "sha512-aEhgas7aJ6vZnNFC7K4/vMGDGyOiqWcYZPpIWrTKuTAlsvDNKy2GFDqh9smL+iq069ZvR0YzEeq0B8NJlLzjFA==",
"dependencies": [
"@typescript-eslint/types",
"eslint-visitor-keys@4.2.0"
]
},
"@valkyr/db@1.0.1": {
"integrity": "sha512-zOvf0jbTSOtjzAgWKeD6S3/QQdtodPy+LkxfnhoggOzYhthkmZ1A8SauucFgkvIrzEp8e3IfNBHy0qQUHJRTog==",
"dependencies": [
"dot-prop",
"fast-equals",
"idb",
"mingo",
"nanoid@5.0.2",
"rfdc",
"rxjs"
]
},
"@zod/core@0.8.1": {
"integrity": "sha512-djj8hPhxIHcG8ptxITaw/Bout5HJZ9NyRbKr95Eilqwt9R0kvITwUQGDU+n+MVdsBIka5KwztmZSLti22F+P0A=="
},
"acorn-jsx@5.3.2_acorn@8.14.1": {
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
"dependencies": [
"acorn"
]
},
"acorn@8.14.1": {
"integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="
},
"ajv@6.12.6": {
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dependencies": [
"fast-deep-equal",
"fast-json-stable-stringify",
"json-schema-traverse",
"uri-js"
]
},
"ansi-styles@4.3.0": {
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dependencies": [
"color-convert"
]
},
"argparse@2.0.1": {
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"balanced-match@1.0.2": {
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"brace-expansion@1.1.11": {
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": [
"balanced-match",
"concat-map"
]
},
"brace-expansion@2.0.1": {
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": [
"balanced-match"
]
},
"braces@3.0.3": {
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dependencies": [
"fill-range"
]
},
"bson@6.10.3": {
"integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ=="
},
"callsites@3.1.0": {
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="
},
"chalk@4.1.2": {
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dependencies": [
"ansi-styles",
"supports-color"
]
},
"color-convert@2.0.1": {
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dependencies": [
"color-name"
]
},
"color-name@1.1.4": {
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"concat-map@0.0.1": {
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"cross-spawn@7.0.6": {
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dependencies": [
"path-key",
"shebang-command",
"which"
]
},
"debug@4.4.0": {
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"dependencies": [
"ms"
]
},
"deep-is@0.1.4": {
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="
},
"dot-prop@8.0.2": {
"integrity": "sha512-xaBe6ZT4DHPkg0k4Ytbvn5xoxgpG0jOS1dYxSOwAHPuNLjP3/OzN0gH55SrLqpx8cBfSaVt91lXYkApjb+nYdQ==",
"dependencies": [
"type-fest"
]
},
"escape-string-regexp@4.0.0": {
"integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="
},
"eslint-plugin-simple-import-sort@12.1.1_eslint@9.24.0": {
"integrity": "sha512-6nuzu4xwQtE3332Uz0to+TxDQYRLTKRESSc2hefVT48Zc8JthmN23Gx9lnYhu0FtkRSL1oxny3kJ2aveVhmOVA==",
"dependencies": [
"eslint"
]
},
"eslint-scope@8.3.0": {
"integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==",
"dependencies": [
"esrecurse",
"estraverse"
]
},
"eslint-visitor-keys@3.4.3": {
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="
},
"eslint-visitor-keys@4.2.0": {
"integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw=="
},
"eslint@9.24.0": {
"integrity": "sha512-eh/jxIEJyZrvbWRe4XuVclLPDYSYYYgLy5zXGGxD6j8zjSAxFEzI2fL/8xNq6O2yKqVt+eF2YhV+hxjV6UKXwQ==",
"dependencies": [
"@eslint-community/eslint-utils",
"@eslint-community/regexpp",
"@eslint/config-array",
"@eslint/config-helpers",
"@eslint/core@0.12.0",
"@eslint/eslintrc",
"@eslint/js",
"@eslint/plugin-kit",
"@humanfs/node",
"@humanwhocodes/module-importer",
"@humanwhocodes/retry@0.4.2",
"@types/estree",
"@types/json-schema",
"ajv",
"chalk",
"cross-spawn",
"debug",
"escape-string-regexp",
"eslint-scope",
"eslint-visitor-keys@4.2.0",
"espree",
"esquery",
"esutils",
"fast-deep-equal",
"file-entry-cache",
"find-up",
"glob-parent@6.0.2",
"ignore",
"imurmurhash",
"is-glob",
"json-stable-stringify-without-jsonify",
"lodash.merge",
"minimatch@3.1.2",
"natural-compare",
"optionator"
]
},
"espree@10.3.0_acorn@8.14.1": {
"integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==",
"dependencies": [
"acorn",
"acorn-jsx",
"eslint-visitor-keys@4.2.0"
]
},
"esquery@1.6.0": {
"integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
"dependencies": [
"estraverse"
]
},
"esrecurse@4.3.0": {
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
"dependencies": [
"estraverse"
]
},
"estraverse@5.3.0": {
"integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="
},
"esutils@2.0.3": {
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="
},
"fake-indexeddb@6.0.0": {
"integrity": "sha512-YEboHE5VfopUclOck7LncgIqskAqnv4q0EWbYCaxKKjAvO93c+TJIaBuGy8CBFdbg9nKdpN3AuPRwVBJ4k7NrQ=="
},
"fast-deep-equal@3.1.3": {
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
},
"fast-equals@5.0.1": {
"integrity": "sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ=="
},
"fast-glob@3.3.3": {
"integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==",
"dependencies": [
"@nodelib/fs.stat",
"@nodelib/fs.walk",
"glob-parent@5.1.2",
"merge2",
"micromatch"
]
},
"fast-json-stable-stringify@2.1.0": {
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"fast-levenshtein@2.0.6": {
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="
},
"fastq@1.19.1": {
"integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
"dependencies": [
"reusify"
]
},
"file-entry-cache@8.0.0": {
"integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==",
"dependencies": [
"flat-cache"
]
},
"fill-range@7.1.1": {
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dependencies": [
"to-regex-range"
]
},
"find-up@5.0.0": {
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dependencies": [
"locate-path",
"path-exists"
]
},
"flat-cache@4.0.1": {
"integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==",
"dependencies": [
"flatted",
"keyv"
]
},
"flatted@3.3.3": {
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="
},
"glob-parent@5.1.2": {
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dependencies": [
"is-glob"
]
},
"glob-parent@6.0.2": {
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
"dependencies": [
"is-glob"
]
},
"globals@14.0.0": {
"integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="
},
"graphemer@1.4.0": {
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="
},
"has-flag@4.0.0": {
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
},
"idb@7.1.1": {
"integrity": "sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ=="
},
"ignore@5.3.2": {
"integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="
},
"import-fresh@3.3.1": {
"integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
"dependencies": [
"parent-module",
"resolve-from"
]
},
"imurmurhash@0.1.4": {
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="
},
"is-extglob@2.1.1": {
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="
},
"is-glob@4.0.3": {
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dependencies": [
"is-extglob"
]
},
"is-number@7.0.0": {
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="
},
"isexe@2.0.0": {
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"js-yaml@4.1.0": {
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dependencies": [
"argparse"
]
},
"json-buffer@3.0.1": {
"integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="
},
"json-schema-traverse@0.4.1": {
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
},
"json-stable-stringify-without-jsonify@1.0.1": {
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="
},
"keyv@4.5.4": {
"integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
"dependencies": [
"json-buffer"
]
},
"levn@0.4.1": {
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
"dependencies": [
"prelude-ls",
"type-check"
]
},
"locate-path@6.0.0": {
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dependencies": [
"p-locate"
]
},
"lodash.merge@4.6.2": {
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"memory-pager@1.5.0": {
"integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="
},
"merge2@1.4.1": {
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
},
"micromatch@4.0.8": {
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dependencies": [
"braces",
"picomatch"
]
},
"mingo@6.4.6": {
"integrity": "sha512-SMp06Eo5iEthCPpKXgEZ6DTZKxknpTqj49YN6iHpapj9DKltBCv0RFu+0mBBjMU0SiHR9pYkurkk74+VFGTqxw=="
},
"minimatch@3.1.2": {
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": [
"brace-expansion@1.1.11"
]
},
"minimatch@9.0.5": {
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"dependencies": [
"brace-expansion@2.0.1"
]
},
"mongodb-connection-string-url@3.0.2": {
"integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==",
"dependencies": [
"@types/whatwg-url",
"whatwg-url"
]
},
"mongodb@6.15.0": {
"integrity": "sha512-ifBhQ0rRzHDzqp9jAQP6OwHSH7dbYIQjD3SbJs9YYk9AikKEettW/9s/tbSFDTpXcRbF+u1aLrhHxDFaYtZpFQ==",
"dependencies": [
"@mongodb-js/saslprep",
"bson",
"mongodb-connection-string-url"
]
},
"ms@2.1.3": {
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"nanoid@5.0.2": {
"integrity": "sha512-2ustYUX1R2rL/Br5B/FMhi8d5/QzvkJ912rBYxskcpu0myTHzSZfTr1LAS2Sm7jxRUObRrSBFoyzwAhL49aVSg=="
},
"nanoid@5.1.5": {
"integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw=="
},
"natural-compare@1.4.0": {
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="
},
"optionator@0.9.4": {
"integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
"dependencies": [
"deep-is",
"fast-levenshtein",
"levn",
"prelude-ls",
"type-check",
"word-wrap"
]
},
"p-limit@3.1.0": {
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"dependencies": [
"yocto-queue"
]
},
"p-locate@5.0.0": {
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dependencies": [
"p-limit"
]
},
"parent-module@1.0.1": {
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
"dependencies": [
"callsites"
]
},
"path-exists@4.0.0": {
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="
},
"path-key@3.1.1": {
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="
},
"picomatch@2.3.1": {
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="
},
"postgres@3.4.5": {
"integrity": "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg=="
},
"prelude-ls@1.2.1": {
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="
},
"prettier@3.5.3": {
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="
},
"punycode@2.3.1": {
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="
},
"queue-microtask@1.2.3": {
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="
},
"resolve-from@4.0.0": {
"integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="
},
"reusify@1.1.0": {
"integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="
},
"rfdc@1.3.0": {
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
},
"run-parallel@1.2.0": {
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dependencies": [
"queue-microtask"
]
},
"rxjs@7.8.1": {
"integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==",
"dependencies": [
"tslib"
]
},
"semver@7.7.1": {
"integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="
},
"shebang-command@2.0.0": {
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dependencies": [
"shebang-regex"
]
},
"shebang-regex@3.0.0": {
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="
},
"sparse-bitfield@3.0.3": {
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
"dependencies": [
"memory-pager"
]
},
"strip-json-comments@3.1.1": {
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="
},
"supports-color@7.2.0": {
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dependencies": [
"has-flag"
]
},
"to-regex-range@5.0.1": {
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dependencies": [
"is-number"
]
},
"tr46@5.1.1": {
"integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
"dependencies": [
"punycode"
]
},
"ts-api-utils@2.1.0_typescript@5.8.3": {
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
"dependencies": [
"typescript"
]
},
"tslib@2.8.1": {
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
},
"type-check@0.4.0": {
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
"dependencies": [
"prelude-ls"
]
},
"type-fest@3.13.1": {
"integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g=="
},
"typescript-eslint@8.30.1_eslint@9.24.0_typescript@5.8.3_@typescript-eslint+parser@8.30.1__eslint@9.24.0__typescript@5.8.3": {
"integrity": "sha512-D7lC0kcehVH7Mb26MRQi64LMyRJsj3dToJxM1+JVTl53DQSV5/7oUGWQLcKl1C1KnoVHxMMU2FNQMffr7F3Row==",
"dependencies": [
"@typescript-eslint/eslint-plugin",
"@typescript-eslint/parser",
"@typescript-eslint/utils",
"eslint",
"typescript"
]
},
"typescript@5.8.3": {
"integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="
},
"uri-js@4.4.1": {
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
"dependencies": [
"punycode"
]
},
"webidl-conversions@7.0.0": {
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="
},
"whatwg-url@14.2.0": {
"integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
"dependencies": [
"tr46",
"webidl-conversions"
]
},
"which@2.0.2": {
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dependencies": [
"isexe"
]
},
"word-wrap@1.2.5": {
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="
},
"yocto-queue@0.1.0": {
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="
},
"zod@4.0.0-beta.20250420T053007": {
"integrity": "sha512-5pp8Q0PNDaNcUptGiBE9akyioJh3RJpagIxrLtAVMR9IxwcSZiOsJD/1/98CyhItdTlI2H91MfhhLzRlU+fifA==",
"dependencies": [
"@zod/core"
]
}
},
"workspace": {
"packageJson": {
"dependencies": [
"npm:@jsr/std__assert@1.0.12",
"npm:@jsr/std__async@1.0.12",
"npm:@jsr/std__testing@1.0.11",
"npm:@jsr/valkyr__testcontainers@2.0.0",
"npm:@valkyr/db@1.0.1",
"npm:eslint-plugin-simple-import-sort@12.1.1",
"npm:eslint@9.24.0",
"npm:fake-indexeddb@6.0.0",
"npm:mongodb@6.15.0",
"npm:nanoid@5.1.5",
"npm:postgres@3.4.5",
"npm:prettier@3.5.3",
"npm:typescript-eslint@8.30.1",
"npm:zod@next"
]
}
}
}

30
eslint.config.mjs Normal file
View File

@@ -0,0 +1,30 @@
import simpleImportSort from "eslint-plugin-simple-import-sort";
import tseslint from "typescript-eslint";
export default [
...tseslint.configs.recommended,
{
plugins: {
"simple-import-sort": simpleImportSort,
},
rules: {
"simple-import-sort/imports": "error",
"simple-import-sort/exports": "error",
},
},
{
files: ["**/*.ts"],
rules: {
"@typescript-eslint/ban-ts-comment": ["error", {
"ts-expect-error": "allow-with-description",
minimumDescriptionLength: 10,
}],
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unused-vars": ["error", {
argsIgnorePattern: "^_",
varsIgnorePattern: "^_",
}],
},
},
];

74
libraries/aggregate-factory.ts Normal file
View File

@@ -0,0 +1,74 @@
import { AggregateRootClass } from "./aggregate.ts";
import { EventFactory } from "./event-factory.ts";
import { AnyEventStore } from "./event-store.ts";
/**
 * Indexes a list of aggregate roots for use with event stores when
 * generating or accessing aggregate functionality.
*
* @example
*
* ```ts
* import { AggregateRoot, AggregateFactory } from "@valkyr/event-store";
* import z from "zod";
*
* class User extends AggregateRoot {}
*
* const factory = new AggregateFactory([User]);
*
* export type Aggregates = typeof factory.$aggregates;
* ```
*/
export class AggregateFactory<
const TEventFactory extends EventFactory = EventFactory,
const TAggregates extends AggregateRootClass<TEventFactory>[] = AggregateRootClass<TEventFactory>[],
> {
/**
* Optimized aggregate lookup index.
*/
readonly #index = new Map<TAggregates[number]["name"], TAggregates[number]>();
aggregates: TAggregates;
/**
* Inferred type of the aggregates registered with the factory.
*/
declare readonly $aggregates: TAggregates;
/**
* Instantiate a new AggregateFactory with given list of supported aggregates.
*
* @param aggregates - Aggregates to register with the factory.
*/
constructor(aggregates: TAggregates) {
this.aggregates = aggregates;
for (const aggregate of aggregates) {
this.#index.set(aggregate.name, aggregate);
}
}
/**
* Attaches the given store to all the aggregates registered with this instance.
*
* If the factory is passed into multiple event stores, the aggregates will be
 * overridden by the last execution. It's recommended to create individual instances
* for each list of aggregates.
*
* @param store - Event store to attach to the aggregates.
*/
withStore(store: AnyEventStore): this {
for (const aggregate of this.aggregates) {
aggregate.$store = store;
}
return this;
}
/**
* Get a registered aggregate from the factory.
*
* @param name - Aggregate to retrieve.
*/
get<TName extends TAggregates[number]["name"]>(name: TName): Extract<TAggregates[number], { name: TName }> {
return this.#index.get(name) as Extract<TAggregates[number], { name: TName }>;
}
}
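Because `withStore` mutates the registered aggregate classes in place, a factory instance should be bound to a single event store; a hedged sketch of that wiring, where `User` and `eventStore` are assumed to exist elsewhere:

```ts
// Illustrative only; `User` extends AggregateRoot and `eventStore` is a
// configured event store.
const aggregates = new AggregateFactory([User]);

// Normally invoked by the EventStore constructor; attaching the same factory
// to a second store would rebind `User.$store` to the last store used.
aggregates.withStore(eventStore);

// Lookup is typed against the registered aggregate names.
const UserAggregate = aggregates.get("User");
```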

168
libraries/aggregate.ts Normal file
View File

@@ -0,0 +1,168 @@
import type { AnyEventStore, EventsInsertSettings } from "../libraries/event-store.ts";
import type { Unknown } from "../types/common.ts";
import { EventFactory } from "./event-factory.ts";
/**
* Represents an aggregate root in an event-sourced system.
*
* This abstract class serves as a base for domain aggregates that manage
* state changes through events. It provides functionality for creating
* instances from snapshots, handling pending events, and committing
* changes to an event store.
*
* @template TEvent - The type of events associated with this aggregate.
*/
export abstract class AggregateRoot<TEventFactory extends EventFactory> {
/**
* Unique identifier allowing for easy indexing of aggregate lists.
*/
static readonly name: string;
/**
* Event store to transact against.
*/
protected static _store?: AnyEventStore;
/**
* List of pending records to push to the parent event store.
*/
#pending: TEventFactory["$events"][number]["$record"][] = [];
// -------------------------------------------------------------------------
// Accessors
// -------------------------------------------------------------------------
static get $store(): AnyEventStore {
if (this._store === undefined) {
throw new Error(`Aggregate Root > Failed to retrieve store for '${this.name}', no store has been attached.`);
}
return this._store;
}
static set $store(store: AnyEventStore) {
// if (this._store !== undefined) {
// throw new Error(`Aggregate '${this.constructor.name}' already has store assigned`);
// }
this._store = store;
}
/**
* Get store instance attached to the static aggregate.
*/
get $store(): AnyEventStore {
return (this.constructor as any).$store;
}
/**
 * Does the aggregate have pending events to submit to the event store?
*/
get isDirty(): boolean {
return this.#pending.length > 0;
}
// -------------------------------------------------------------------------
// Factories
// -------------------------------------------------------------------------
/**
* Create a new aggregate instance with an optional snapshot. This method
 * exists as a unified way to create new aggregates from an event store
 * adapter and is not really meant for aggregate creation outside of the
* event store.
*
* @param snapshot - Snapshot to assign to the aggregate state.
*/
static from<TEventFactory extends EventFactory, TAggregateRoot extends typeof AggregateRoot<TEventFactory>>(
this: TAggregateRoot,
snapshot?: Unknown,
): InstanceType<TAggregateRoot> {
const instance = new (this as any)();
if (snapshot !== undefined) {
Object.assign(instance, snapshot);
}
return instance;
}
// -------------------------------------------------------------------------
// Events
// -------------------------------------------------------------------------
/**
* Push a new event record to the pending list of events to commit to
 * an event store. This also submits the record to the aggregate's
 * `.with` fold handler to update the aggregate state.
*
* @example
*
* const foo = await eventStore.aggregate("foo");
*
* foo.push({
* type: "foo:bar-set",
* stream: foo.id,
* data: { bar: "foobar" }
* });
*
* await foo.save();
*
 * @param record - Event to push into the pending commit pool.
*/
push<TType extends TEventFactory["$events"][number]["state"]["type"]>(
record: { type: TType } & Extract<TEventFactory["$events"][number], { state: { type: TType } }>["$payload"],
): this {
const pending = this.$store.event(record);
this.#pending.push(pending);
this.with(pending);
return this;
}
/**
* Processes and applies incoming events to update the aggregate state.
*
* @param record - Event record to fold.
*/
abstract with(record: TEventFactory["$events"][number]["$record"]): void;
// -------------------------------------------------------------------------
// Mutators
// -------------------------------------------------------------------------
/**
* Saves all pending events to the attached event store.
*
* @param settings - Event insert settings.
* @param flush - Empty the pending event list after event store push.
*/
async save(settings?: EventsInsertSettings, flush = true): Promise<this> {
if (this.isDirty === false) {
return this;
}
await this.$store.pushManyEvents(this.#pending, settings);
if (flush === true) {
this.flush();
}
return this;
}
/**
* Removes all events from the aggregate #pending list.
*/
flush(): this {
this.#pending = [];
return this;
}
// -------------------------------------------------------------------------
// Converters
// -------------------------------------------------------------------------
/**
* Returns the aggregate pending event record list. This allows for
* extraction of the pending commit list so that it can be used in
* event submission across multiple aggregates.
*/
toPending(): TEventFactory["$events"][number]["$record"][] {
return this.#pending;
}
}
export type AggregateRootClass<TEventFactory extends EventFactory> = typeof AggregateRoot<TEventFactory>;
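A minimal sketch of a concrete aggregate built on this base class; `Events` stands in for an application's `EventFactory` type with a `user:created` event carrying `{ name: string }` data, and a store is assumed to have been attached (e.g. via `AggregateFactory.withStore`):

```ts
// Illustrative subclass; the `Events` factory type and the attached store
// are assumed to exist elsewhere.
class User extends AggregateRoot<Events> {
  static readonly name = "User";

  id = "";
  name = "";

  // Folds each incoming record into the aggregate state.
  with(record: Events["$events"][number]["$record"]): void {
    switch (record.type) {
      case "user:created": {
        this.id = record.stream;
        this.name = record.data.name;
        break;
      }
    }
  }
}

// Queue a pending event and commit it through the attached store.
const user = User.from();
user.push({ type: "user:created", stream: "user-1", data: { name: "Jane" } });
await user.save();
```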

122
libraries/errors.ts Normal file
View File

@@ -0,0 +1,122 @@
/**
* Error thrown when an expected event is missing from the event store.
*
* This occurs when an event type has not been registered or cannot be found
* within the event store instance.
*
* @property type - The type of error, always `"EventMissingError"`.
*/
export class EventMissingError extends Error {
readonly type = "EventMissingError";
constructor(type: string) {
super(`EventStore Error: Event '${type}' has not been registered with the event store instance.`);
}
}
/*
|--------------------------------------------------------------------------------
| Event Errors
|--------------------------------------------------------------------------------
*/
/**
* Error thrown when an event fails validation checks.
*
* This error indicates that an invalid event was provided during an insertion
* process.
*
* @property type - Type of error, always `"EventValidationError"`.
* @property errors - List of issues during validation.
*/
export class EventValidationError extends Error {
readonly type = "EventValidationError";
constructor(
readonly event: any,
readonly errors: string[],
) {
super([`✖ Failed to validate '${event.type}' event!`, ...errors].join("\n"));
}
}
/**
* Error thrown when an event fails to be inserted into the event store.
*
* This error occurs when an issue arises during the insertion of an
* event into storage, such as a constraint violation or storage failure.
*
* @property type - The type of error, always `"EventInsertionError"`.
*/
export class EventInsertionError extends Error {
readonly type = "EventInsertionError";
}
/*
|--------------------------------------------------------------------------------
| Hybrid Logical Clock Errors
|--------------------------------------------------------------------------------
*/
/**
* Error thrown when a forward time jump exceeds the allowed tolerance in a Hybrid Logical Clock (HLC).
*
* This error occurs when the system detects a time jump beyond the configured tolerance,
* which may indicate clock synchronization issues in a distributed system.
*
* @property type - The type of error, always `"ForwardJumpError"`.
* @property timejump - The detected forward time jump in milliseconds.
* @property tolerance - The allowed maximum time jump tolerance in milliseconds.
*/
export class HLCForwardJumpError extends Error {
readonly type = "ForwardJumpError";
constructor(
readonly timejump: number,
readonly tolerance: number,
) {
super(`HLC Violation: Detected a forward time jump of ${timejump}ms, which exceeds the allowed tolerance of ${tolerance}ms.`);
}
}
/**
* Error thrown when the received HLC timestamp is ahead of the system's wall time beyond the allowed offset.
*
* This error ensures that timestamps do not drift too far ahead of real time,
* preventing inconsistencies in distributed event ordering.
*
* @property type - The type of error, always `"ClockOffsetError"`.
* @property offset - The difference between the received time and the system's wall time in milliseconds.
* @property maxOffset - The maximum allowed clock offset in milliseconds.
*/
export class HLCClockOffsetError extends Error {
readonly type = "ClockOffsetError";
constructor(
readonly offset: number,
readonly maxOffset: number,
) {
super(`HLC Violation: Received time is ${offset}ms ahead of the wall time, exceeding the 'maxOffset' limit of ${maxOffset}ms.`);
}
}
/**
* Error thrown when the Hybrid Logical Clock (HLC) wall time exceeds the defined maximum limit.
*
* This error prevents time overflow issues that could lead to incorrect event ordering
* in a distributed system.
*
* @property type - The type of error, always `"WallTimeOverflowError"`.
* @property time - The current HLC wall time in milliseconds.
* @property maxTime - The maximum allowed HLC wall time in milliseconds.
*/
export class HLCWallTimeOverflowError extends Error {
readonly type = "WallTimeOverflowError";
constructor(
readonly time: number,
readonly maxTime: number,
) {
super(`HLC Violation: Wall time ${time}ms exceeds the max time of ${maxTime}ms.`);
}
}
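Since each class above exposes a literal `type` discriminant alongside the usual `Error` shape, callers can branch on failures explicitly; a brief sketch, where `eventStore` and `record` are assumed to exist elsewhere:

```ts
// Illustrative handler; `eventStore` is a configured store and `record` is
// an event record produced by it.
try {
  await eventStore.pushEvent(record);
} catch (error) {
  if (error instanceof EventValidationError) {
    console.error(error.type, error.errors); // "EventValidationError", string[]
  } else if (error instanceof EventInsertionError) {
    console.error(error.type, error.message); // "EventInsertionError", driver message
  } else {
    throw error;
  }
}
```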

53
libraries/event-factory.ts Normal file
View File

@@ -0,0 +1,53 @@
import { Event } from "./event.ts";
/**
 * Indexes a list of events for use with aggregates and event stores
* when generating or accessing event functionality.
*
* @example
*
* ```ts
* import { event } from "@valkyr/event-store";
* import z from "zod";
*
* const factory = new EventFactory([
* event
* .type("user:created")
* .data(z.object({ name: z.string(), email: z.email() }))
* .meta(z.object({ createdBy: z.string() })),
* ]);
*
* export type Events = typeof factory.$events;
* ```
*/
export class EventFactory<const TEvents extends Event[] = Event[]> {
/**
* Optimized event lookup index.
*/
readonly #index = new Map<TEvents[number]["state"]["type"], TEvents[number]>();
/**
* Inferred type of the events registered with the factory.
*/
declare readonly $events: TEvents;
/**
* Instantiate a new EventFactory with given list of supported events.
*
* @param events - Events to register with the factory.
*/
constructor(readonly events: TEvents) {
for (const event of events) {
this.#index.set(event.state.type, event);
}
}
/**
* Get a registered event from the factory.
*
* @param type - Event type to retrieve.
*/
get<TType extends TEvents[number]["state"]["type"]>(type: TType): Extract<TEvents[number], { state: { type: TType } }> {
return this.#index.get(type) as Extract<TEvents[number], { state: { type: TType } }>;
}
}
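The `get` method narrows its return type to the matching event definition; a short sketch continuing the factory from the example above. The `record` call mirrors how `EventStore.event` uses a definition internally, and the exact payload shape is an assumption derived from the registered schemas:

```ts
// Narrowed to the "user:created" definition registered above.
const userCreated = factory.get("user:created");

// Produce an event record through the definition, as EventStore.event does
// internally; the payload fields follow the schemas given to the factory.
const record = userCreated.record({
  type: "user:created",
  stream: "user-1",
  data: { name: "Jane", email: "jane@example.com" },
  meta: { createdBy: "admin" },
});
```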

557
libraries/event-store.ts Normal file
View File

@@ -0,0 +1,557 @@
/**
* @module
*
* This module contains an abstract event store solution that can take a variety of
* provider adapters to support multiple storage drivers.
*
* @example
* ```ts
 * import { event, EventStore } from "@valkyr/event-store";
* import { z } from "zod";
*
* const eventStore = new EventStore({
* adapter: {
* providers: {
 * events: new EventProvider(db),
 * relations: new RelationsProvider(db),
 * snapshots: new SnapshotProvider(db),
* },
* },
* events: [
* event
* .type("user:created")
* .data(
* z.strictObject({
* name: z.string(),
* email: z.string().check(z.email())
* }),
* )
* .meta(z.string()),
* ],
* });
* ```
*/
import { EventStoreAdapter } from "../types/adapter.ts";
import type { Unknown } from "../types/common.ts";
import type { EventReadOptions, ReduceQuery } from "../types/query.ts";
import type { AggregateRoot } from "./aggregate.ts";
import { AggregateFactory } from "./aggregate-factory.ts";
import { EventInsertionError, EventMissingError, EventValidationError } from "./errors.ts";
import { EventStatus } from "./event.ts";
import { EventFactory } from "./event-factory.ts";
import type { InferReducerState, Reducer, ReducerLeftFold, ReducerState } from "./reducer.ts";
import { makeAggregateReducer, makeReducer } from "./reducer.ts";
/*
|--------------------------------------------------------------------------------
| Event Store
|--------------------------------------------------------------------------------
*/
/**
 * Provides a common interface to interact with an event storage solution. It's built
* on an adapter pattern to allow for multiple different storage drivers.
*/
export class EventStore<
TEventFactory extends EventFactory,
TAggregateFactory extends AggregateFactory<TEventFactory>,
TEventStoreAdapter extends EventStoreAdapter<any>,
> {
readonly #adapter: TEventStoreAdapter;
readonly #events: TEventFactory;
readonly #aggregates: TAggregateFactory;
readonly #snapshot: "manual" | "auto";
readonly #hooks: EventStoreHooks<TEventFactory>;
declare readonly $events: TEventFactory["$events"];
declare readonly $records: TEventFactory["$events"][number]["$record"][];
constructor(config: EventStoreConfig<TEventFactory, TAggregateFactory, TEventStoreAdapter>) {
this.#adapter = config.adapter;
this.#events = config.events;
this.#aggregates = config.aggregates.withStore(this);
this.#snapshot = config.snapshot ?? "manual";
this.#hooks = config.hooks ?? {};
}
/*
|--------------------------------------------------------------------------------
| Accessors
|--------------------------------------------------------------------------------
*/
get db(): TEventStoreAdapter["db"] {
return this.#adapter.db;
}
get events(): TEventStoreAdapter["providers"]["events"] {
return this.#adapter.providers.events;
}
get relations(): TEventStoreAdapter["providers"]["relations"] {
return this.#adapter.providers.relations;
}
get snapshots(): TEventStoreAdapter["providers"]["snapshots"] {
return this.#adapter.providers.snapshots;
}
/*
|--------------------------------------------------------------------------------
| Event Handlers
|--------------------------------------------------------------------------------
*/
onEventsInserted(fn: EventStoreHooks<TEventFactory>["onEventsInserted"]) {
this.#hooks.onEventsInserted = fn;
}
/*
|--------------------------------------------------------------------------------
| Aggregates
|--------------------------------------------------------------------------------
*/
/**
* Get aggregate uninstantiated class.
*
* @param name - Aggregate name to retrieve.
*/
aggregate<TName extends TAggregateFactory["$aggregates"][number]["name"]>(name: TName): Extract<TAggregateFactory["$aggregates"][number], { name: TName }> {
return this.#aggregates.get(name) as Extract<TAggregateFactory["$aggregates"][number], { name: TName }>;
}
/**
* Takes in an aggregate and commits any pending events to the event store.
*
* @param aggregate - Aggregate to push events from.
* @param settings - Event settings which can modify insertion behavior.
*/
async pushAggregate(aggregate: InstanceType<TAggregateFactory["$aggregates"][number]>, settings?: EventsInsertSettings): Promise<void> {
await aggregate.save(settings);
}
/**
* Takes a list of aggregates and commits any pending events to the event store.
 * Events are committed in order, so it's important to ensure that the aggregates
 * are placed in the correct index position of the array.
 *
 * This method allows for a simpler way to commit many events over many
 * aggregates in a single transaction, ensuring atomicity of a larger group
 * of events.
*
* @param aggregates - Aggregates to push events from.
* @param settings - Event settings which can modify insertion behavior.
*/
async pushManyAggregates(aggregates: InstanceType<TAggregateFactory["$aggregates"][number]>[], settings?: EventsInsertSettings): Promise<void> {
const events: this["$events"][number]["$record"][] = [];
for (const aggregate of aggregates) {
events.push(...aggregate.toPending());
}
await this.pushManyEvents(events, settings);
for (const aggregate of aggregates) {
aggregate.flush();
}
}
/*
|--------------------------------------------------------------------------------
| Events
|--------------------------------------------------------------------------------
*/
/**
* Event factory producing a new event record from one of the events registered
* with the event store instance.
*
* @param payload - Event payload to pass to an available factory.
*/
event<TType extends TEventFactory["$events"][number]["state"]["type"]>(
payload: { type: TType } & Extract<TEventFactory["$events"][number], { state: { type: TType } }>["$payload"],
): Extract<TEventFactory["$events"][number], { state: { type: TType } }>["$record"] {
const event = this.#events.get((payload as any).type);
if (event === undefined) {
throw new EventMissingError((payload as any).type);
}
return event.record(payload);
}
/**
* Insert an event record to the local event store database.
*
* @param record - Event record to insert.
* @param settings - Event settings which can modify insertion behavior.
*/
async pushEvent(record: this["$events"][number]["$record"], settings: EventsInsertSettings = {}): Promise<void> {
const event = this.#events.get(record.type);
if (event === undefined) {
throw new EventMissingError(record.type);
}
const validation = event.validate(record);
if (validation.success === false) {
throw new EventValidationError(record, validation.errors);
}
await this.events.insert(record).catch((error) => {
throw new EventInsertionError(error.message);
});
if (settings.emit !== false) {
await this.#hooks.onEventsInserted?.([record], settings).catch(this.#hooks.onError ?? console.error);
}
}
/**
* Add many events in strict sequence to the events table.
*
* This method runs in a transaction and will fail all events if one or more
* insertion failures occurs.
*
* @param records - List of event records to insert.
* @param settings - Event settings which can modify insertion behavior.
*/
async pushManyEvents(records: this["$events"][number]["$record"][], settings: EventsInsertSettings = {}): Promise<void> {
const events: this["$events"][number]["$record"][] = [];
for (const record of records) {
const event = this.#events.get(record.type);
if (event === undefined) {
throw new EventMissingError(record.type);
}
const validation = event.validate(record);
if (validation.success === false) {
throw new EventValidationError(record, validation.errors);
}
events.push(record);
}
await this.events.insertMany(events).catch((error) => {
throw new EventInsertionError(error.message);
});
if (settings.emit !== false) {
await this.#hooks.onEventsInserted?.(events, settings).catch(this.#hooks.onError ?? console.error);
}
}
/**
 * Enable the ability to check an incoming event's status in relation to the local
* ledger. This is to determine what actions to take upon the ledger based on the
* current status.
*
* **Exists**
*
* References the existence of the event in the local ledger. It is determined by
* looking at the recorded event id which should be unique to the entirety of the
* ledger.
*
* **Outdated**
*
 * References the event's created timestamp relative to the same event type in the
 * hosted stream. If another event of the same type in the stream is newer than
* the provided event, the provided event is considered outdated.
*/
async getEventStatus(event: this["$events"][number]["$record"]): Promise<EventStatus> {
const record = await this.events.getById(event.id);
if (record) {
return { exists: true, outdated: true };
}
return { exists: false, outdated: await this.events.checkOutdated(event) };
}
/**
* Retrieve events from the events table.
*
* @param options - Read options. (Optional)
*/
async getEvents(options?: EventReadOptions): Promise<this["$events"][number]["$record"][]> {
return this.events.get(options);
}
/**
* Retrieve events from the events table under the given streams.
*
* @param streams - Streams to retrieve events for.
* @param options - Read options to pass to the provider. (Optional)
*/
async getEventsByStreams(streams: string[], options?: EventReadOptions): Promise<TEventFactory["$events"][number]["$record"][]> {
return this.events.getByStreams(streams, options);
}
/**
* Retrieve all events under the given relational keys.
*
* @param keys - Relational keys to retrieve events for.
* @param options - Relational logic options. (Optional)
*/
async getEventsByRelations(keys: string[], options?: EventReadOptions): Promise<TEventFactory["$events"][number]["$record"][]> {
const streamIds = await this.relations.getByKeys(keys);
if (streamIds.length === 0) {
return [];
}
return this.events.getByStreams(streamIds, options);
}
/*
|--------------------------------------------------------------------------------
| Reducers
|--------------------------------------------------------------------------------
*/
/**
* Make a new event reducer based on the events registered with the event store.
*
 * @param foldFn - Reducer method to run over given events.
 * @param stateFn - Factory producing the initial state.
*
* @example
* ```ts
* const reducer = eventStore.makeReducer<{ name: string }>((state, event) => {
* switch (event.type) {
* case "FooCreated": {
* state.name = event.data.name;
* break;
* }
* }
* return state;
* }, () => ({
* name: ""
* }));
*
* const state = await eventStore.reduce({ name: "foo:reducer", stream: "stream-id", reducer });
* ```
*/
makeReducer<TState extends Unknown>(foldFn: ReducerLeftFold<TState, TEventFactory>, stateFn: ReducerState<TState>): Reducer<TEventFactory, TState> {
return makeReducer<TEventFactory, TState>(foldFn, stateFn);
}
/**
* Make a new event reducer based on the events registered with the event store.
*
* @param aggregate - Aggregate class to create instance from.
*
* @example
* ```ts
* class Foo extends AggregateRoot<Event> {
* name: string = "";
*
* static #reducer = makeAggregateReducer(Foo);
*
* static async getById(fooId: string): Promise<Foo | undefined> {
* return eventStore.reduce({
* name: "foo",
* stream: "stream-id",
* reducer: this.#reducer,
* });
* }
*
* with(event) {
* switch (event.type) {
* case "FooCreated": {
* this.name = event.data.name;
* break;
* }
* }
* }
 * }
* ```
*/
makeAggregateReducer<TAggregateRoot extends typeof AggregateRoot<TEventFactory>>(aggregate: TAggregateRoot): Reducer<TEventFactory, InstanceType<TAggregateRoot>> {
return makeAggregateReducer<TEventFactory, TAggregateRoot>(aggregate);
}
/**
 * Reduce events in the given stream to an entity state.
*
* @param query - Reducer query to resolve event state from.
 * @param pending - List of non-committed events to append to the server events.
*
* @example
*
* ```ts
* const state = await eventStore.reduce({ stream, reducer });
* ```
*
* @example
*
* ```ts
* const state = await eventStore.reduce({ relation: `foo:${foo}:bars`, reducer });
* ```
*
 * Reducers are created through the `.makeReducer` and `.makeAggregateReducer` methods.
*/
async reduce<TReducer extends Reducer>(
{ name, stream, relation, reducer, ...query }: ReduceQuery<TReducer>,
pending: TEventFactory["$events"][number]["$record"][] = [],
): Promise<ReturnType<TReducer["reduce"]> | undefined> {
const id = stream ?? relation;
let state: InferReducerState<TReducer> | undefined;
let cursor: string | undefined;
const snapshot = await this.getSnapshot(name, id);
if (snapshot !== undefined) {
cursor = snapshot.cursor;
state = snapshot.state;
}
const events = (
stream !== undefined ? await this.getEventsByStreams([id], { ...query, cursor }) : await this.getEventsByRelations([id], { ...query, cursor })
).concat(pending);
if (events.length === 0) {
if (state !== undefined) {
return reducer.from(state);
}
return undefined;
}
const result = reducer.reduce(events, state);
if (this.#snapshot === "auto") {
await this.snapshots.insert(name, id, events.at(-1)!.created, result);
}
return result;
}
/*
|--------------------------------------------------------------------------------
| Snapshots
|--------------------------------------------------------------------------------
*/
/**
* Create a new snapshot for the given stream/relation and reducer.
*
* @param query - Reducer query to create snapshot from.
*
* @example
* ```ts
* await eventStore.createSnapshot({ stream, reducer });
* ```
*
* @example
* ```ts
* await eventStore.createSnapshot({ relation: `foo:${foo}:bars`, reducer });
* ```
*/
async createSnapshot<TReducer extends Reducer>({ name, stream, relation, reducer, ...query }: ReduceQuery<TReducer>): Promise<void> {
const id = stream ?? relation;
const events = stream !== undefined ? await this.getEventsByStreams([id], query) : await this.getEventsByRelations([id], query);
if (events.length === 0) {
return undefined;
}
await this.snapshots.insert(name, id, events.at(-1)!.created, reducer.reduce(events));
}
/**
 * Get an entity state snapshot from the database. These are useful when we
 * want to reduce the number of events that have to be processed when fetching
* state history for a reducer.
*
 * @param name - Name of the reducer to get the snapshot for.
 * @param streamOrRelation - Stream or relation to get the snapshot for.
*
* @example
* ```ts
* const snapshot = await eventStore.getSnapshot("foo:reducer", stream);
* console.log(snapshot);
* // {
* // cursor: "jxubdY-0",
* // state: {
* // foo: "bar"
* // }
* // }
* ```
*
* @example
* ```ts
* const snapshot = await eventStore.getSnapshot("foo:reducer", `foo:${foo}:bars`);
* console.log(snapshot);
* // {
* // cursor: "jxubdY-0",
* // state: {
* // count: 1
* // }
* // }
* ```
*/
async getSnapshot<TReducer extends Reducer, TState = InferReducerState<TReducer>>(
name: string,
streamOrRelation: string,
): Promise<{ cursor: string; state: TState } | undefined> {
const snapshot = await this.snapshots.getByStream(name, streamOrRelation);
if (snapshot === undefined) {
return undefined;
}
return { cursor: snapshot.cursor, state: snapshot.state as TState };
}
/**
* Delete a snapshot.
*
* @param name - Name of the reducer the snapshot belongs to.
* @param streamOrRelation - Stream, or relation, to delete the snapshot for.
*
* @example
* ```ts
* await eventStore.deleteSnapshot("foo:reducer", stream);
* ```
*
* @example
* ```ts
* await eventStore.deleteSnapshot("foo:reducer", `foo:${foo}:bars`);
* ```
*/
async deleteSnapshot(name: string, streamOrRelation: string): Promise<void> {
await this.snapshots.remove(name, streamOrRelation);
}
}
/*
|--------------------------------------------------------------------------------
| Types
|--------------------------------------------------------------------------------
*/
type EventStoreConfig<
TEventFactory extends EventFactory,
TAggregateFactory extends AggregateFactory<TEventFactory>,
TEventStoreAdapter extends EventStoreAdapter<any>,
> = {
adapter: TEventStoreAdapter;
events: TEventFactory;
aggregates: TAggregateFactory;
snapshot?: "manual" | "auto";
hooks?: EventStoreHooks<TEventFactory>;
};
export type EventsInsertSettings = {
/**
* Whether the event store should emit events after successful insertion.
* This only accepts `false` as a value; by default events are always
* projected.
*/
emit?: false;
/**
* Batch key that can be used to group several events into a single
* batched operation for performance-sensitive handling.
*/
batch?: string;
};
export type EventStoreHooks<TEventFactory extends EventFactory> = Partial<{
/**
* Triggered when `.pushEvent` or `.pushManyEvents` has completed successfully.
*
* @param records - List of event records inserted.
* @param settings - Event insert settings used.
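*
* @example
*
* A minimal wiring sketch, mirroring this repository's test utilities, that
* forwards inserted records to a projector:
*
* ```ts
* store.onEventsInserted(async (records, { batch }) => {
*   if (batch !== undefined) {
*     await projector.pushMany(batch, records);
*   } else {
*     for (const record of records) {
*       await projector.push(record, { hydrated: false, outdated: false });
*     }
*   }
* });
* ```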
*/
onEventsInserted(records: TEventFactory["$events"][number]["$record"][], settings: EventsInsertSettings): Promise<void>;
/**
* Triggered when an unhandled exception is thrown during a `.pushEvent` or
* `.pushManyEvents` hook.
*
* @param error - Error that was thrown.
*/
onError(error: unknown): Promise<void>;
}>;
export type AnyEventStore = EventStore<any, any, any>;

203
libraries/event.ts Normal file
View File

@@ -0,0 +1,203 @@
import z, { ZodType } from "zod";
import { EventValidationError } from "./errors.ts";
import { makeId } from "./nanoid.ts";
import { getLogicalTimestamp } from "./time.ts";
import { toPrettyErrorLines } from "./zod.ts";
export class Event<TEventState extends EventState = EventState> {
declare readonly $record: EventRecord<TEventState>;
declare readonly $payload: EventPayload<TEventState>;
constructor(readonly state: TEventState) {}
/**
* Defines the schema for the partial piece of data that makes up a larger
* aggregate state.
*
* @param data - Schema used to parse and infer the data supported by the event.
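*
* @example
*
* A minimal sketch defining an event that carries a string payload:
*
* ```ts
* const userEmailSet = event.type("user:email-set").data(z.string());
* ```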
*/
data<TData extends ZodType>(data: TData): Event<Omit<TEventState, "data"> & { data: TData }> {
return new Event<Omit<TEventState, "data"> & { data: TData }>({ ...this.state, data });
}
/**
* Defines the schema for additional metadata about the event that is not
* directly related to the aggregate state.
*
* @param meta - Schema used to parse and infer the meta supported by the event.
*/
meta<TMeta extends ZodType>(meta: TMeta): Event<Omit<TEventState, "meta"> & { meta: TMeta }> {
return new Event<Omit<TEventState, "meta"> & { meta: TMeta }>({ ...this.state, meta });
}
/**
* Creates an event record by combining the given event with additional metadata.
* The resulting record can be stored in an event store.
*
* @param payload - The event to record.
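*
* @example
*
* A minimal sketch; `userEmailSet` is assumed to be an event defined through
* `event.type(...).data(...)`:
*
* ```ts
* const record = userEmailSet.record({ stream: "user-1", data: "jane.doe@fixture.none" });
* ```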
*/
record(payload: EventPayload<TEventState>): EventRecord<TEventState> {
const timestamp = getLogicalTimestamp();
const record = {
id: makeId(),
stream: payload.stream ?? makeId(),
type: this.state.type,
data: "data" in payload ? payload.data : null,
meta: "meta" in payload ? payload.meta : null,
created: timestamp,
recorded: timestamp,
} as any;
const validation = this.validate(record);
if (validation.success === false) {
throw new EventValidationError(record, validation.errors);
}
return record;
}
/**
* Takes an event record and validates it against the event.
*
* @param record - Record to validate.
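*
* @example
*
* A minimal sketch; `userEmailSet` and `record` are assumed to come from the
* `.record` example above:
*
* ```ts
* const result = userEmailSet.validate(record);
* if (result.success === false) {
*   console.log(result.errors);
* }
* ```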
*/
validate(record: EventRecord<TEventState>): EventValidationResult {
const errors = [];
if (record.type !== this.state.type) {
errors.push(`✖ Event record '${record.type}' does not belong to '${this.state.type}' event.`);
}
if (record.data !== null) {
if (this.state.data === undefined) {
errors.push(`✖ Event record '${record.type}' does not have a 'data' validator.`);
} else {
const result = this.state.data.safeParse(record.data);
if (result.success === false) {
errors.push(toPrettyErrorLines(result.error));
}
}
}
if (record.meta !== null) {
if (this.state.meta === undefined) {
errors.push(`✖ Event record '${record.type}' does not have a 'meta' validator.`);
} else {
const result = this.state.meta.safeParse(record.meta);
if (result.success === false) {
errors.push(toPrettyErrorLines(result.error));
}
}
}
if (errors.length !== 0) {
return { success: false, errors };
}
return { success: true };
}
}
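/**
* Factory used to define new events.
*
* @example
*
* A minimal sketch, mirroring this repository's test fixtures, that defines
* an event with `data` and `meta` schemas:
*
* ```ts
* const userCreated = event
*   .type("user:created")
*   .data(z.strictObject({ email: z.string() }))
*   .meta(z.strictObject({ auditor: z.string() }));
* ```
*/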
export const event = {
type<const TType extends string>(type: TType): Event<{ type: TType }> {
return new Event<{ type: TType }>({ type });
},
};
type EventState = {
type: string;
data?: ZodType;
meta?: ZodType;
};
export type EventPayload<TEventState extends EventState> = { stream?: string } & (TEventState["data"] extends ZodType
? { data: z.infer<TEventState["data"]> }
: object) &
(TEventState["meta"] extends ZodType ? { meta: z.infer<TEventState["meta"]> } : object);
type EventValidationResult =
| {
success: true;
}
| {
success: false;
errors: any[];
};
/**
* Event that has been persisted to an event store solution.
*/
export type EventRecord<TEvent extends EventState = EventState> = {
/**
* A unique event identifier.
*/
id: string;
/**
* Event streams are used to group related events together. This identifier
* is used to identify the stream to which the event belongs.
*/
stream: string;
/**
* Type refers to the purpose of the event, in past tense, describing
* something that has already happened.
*/
type: TEvent["type"];
/**
* Key holding event data that can be used to update one or several read
* models and used to generate aggregate state for the stream in which the
* event belongs.
*/
data: TEvent["data"] extends ZodType ? z.infer<TEvent["data"]> : null;
/**
* Key holding meta data that is not directly tied to read models or used
* in aggregate states.
*/
meta: TEvent["meta"] extends ZodType ? z.infer<TEvent["meta"]> : null;
/**
* An immutable hybrid logical clock timestamp representing the wall time when
* the event was created.
*
* This value is used to identify the date of its creation, and also serves
* as a sorting key when performing reduction logic to generate aggregate
* state for the stream in which the event belongs.
*/
created: string;
/**
* A mutable hybrid logical clock timestamp representing the wall time when the
* event was recorded to the local **event ledger** _(database)_ as opposed to
* when the event was actually created.
*
* This value is used when performing event synchronization between two
* different event ledgers.
*/
recorded: string;
};
/**
* Status of an event and how it relates to other events in the aggregate
* stream it has been recorded.
*/
export type EventStatus = {
/**
* Does the event already exist in the containing stream? This is an
* optimization flag so that we can potentially ignore the processing of
* the event if it already exists.
*/
exists: boolean;
/**
* Is there another event of the same type in the stream that is newer than
* the provided event? This is passed into projectors so that they can
* route the event to the correct projection handlers.
*
* @see {@link Projection [once|on|all]}
*/
outdated: boolean;
};

122
libraries/hlc.ts Normal file
View File

@@ -0,0 +1,122 @@
import { HLCClockOffsetError, HLCForwardJumpError, HLCWallTimeOverflowError } from "./errors.ts";
import { Timestamp } from "./timestamp.ts";
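/**
* Hybrid logical clock (HLC) combining wall-clock time with a logical counter
* so that generated timestamps stay monotonically increasing even when the
* wall clock does not advance, within the configured offset tolerances.
*
* @example
*
* A minimal usage sketch:
*
* ```ts
* const clock = new HLC();
* const a = clock.now();
* const b = clock.now();
* // b always compares greater than a, even if Date.now() did not advance.
* ```
*/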
export class HLC {
time: typeof getTime;
maxTime: number;
maxOffset: number;
timeUpperBound: number;
toleratedForwardClockJump: number;
last: Timestamp;
constructor(
{ time = getTime, maxOffset = 0, timeUpperBound = 0, toleratedForwardClockJump = 0, last }: Options = {},
) {
this.time = time;
this.maxTime = timeUpperBound > 0 ? timeUpperBound : Number.MAX_SAFE_INTEGER;
this.maxOffset = maxOffset;
this.timeUpperBound = timeUpperBound;
this.toleratedForwardClockJump = toleratedForwardClockJump;
this.last = new Timestamp(this.time());
if (last) {
this.last = Timestamp.bigger(new Timestamp(last.time, last.logical), this.last);
}
}
now(): Timestamp {
return this.update(this.last);
}
update(other: Timestamp): Timestamp {
this.last = this.#getTimestamp(other);
return this.last;
}
#getTimestamp(other: Timestamp): Timestamp {
const [time, logical] = this.#getTimeAndLogicalValue(other);
if (!this.#validUpperBound(time)) {
throw new HLCWallTimeOverflowError(time, logical);
}
return new Timestamp(time, logical);
}
#getTimeAndLogicalValue(other: Timestamp): [number, number] {
const last = Timestamp.bigger(other, this.last);
const time = this.time();
if (this.#validOffset(last, time)) {
return [time, 0];
}
return [last.time, last.logical + 1];
}
#validOffset(last: Timestamp, time: number): boolean {
const offset = last.time - time;
if (!this.#validForwardClockJump(offset)) {
throw new HLCForwardJumpError(-offset, this.toleratedForwardClockJump);
}
if (!this.#validMaxOffset(offset)) {
throw new HLCClockOffsetError(offset, this.maxOffset);
}
if (offset < 0) {
return true;
}
return false;
}
#validForwardClockJump(offset: number): boolean {
if (this.toleratedForwardClockJump > 0 && -offset > this.toleratedForwardClockJump) {
return false;
}
return true;
}
#validMaxOffset(offset: number): boolean {
if (this.maxOffset > 0 && offset > this.maxOffset) {
return false;
}
return true;
}
#validUpperBound(time: number): boolean {
return time < this.maxTime;
}
toJSON() {
return Object.freeze({
maxOffset: this.maxOffset,
timeUpperBound: this.timeUpperBound,
toleratedForwardClockJump: this.toleratedForwardClockJump,
last: this.last.toJSON(),
});
}
}
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
export function getTime(): number {
return Date.now();
}
/*
|--------------------------------------------------------------------------------
| Types
|--------------------------------------------------------------------------------
*/
export type Options = {
time?: typeof getTime;
maxOffset?: number;
timeUpperBound?: number;
toleratedForwardClockJump?: number;
last?: {
time: number;
logical: number;
};
};

10
libraries/nanoid.ts Normal file
View File

@@ -0,0 +1,10 @@
import { nanoid } from "nanoid";
/**
* Generate a new nanoid.
*
* @param size - Size of the id. Default: 11
*/
export function makeId(size: number = 11): string {
return nanoid(size);
}

271
libraries/projector.ts Normal file
View File

@@ -0,0 +1,271 @@
import type { Subscription } from "../types/common.ts";
import type {
BatchedProjectionHandler,
BatchedProjectorListeners,
ProjectionFilter,
ProjectionHandler,
ProjectionStatus,
ProjectorListenerFn,
ProjectorListeners,
ProjectorMessage,
} from "../types/projector.ts";
import { EventFactory } from "./event-factory.ts";
import { Queue } from "./queue.ts";
/*
|--------------------------------------------------------------------------------
| Filters
|--------------------------------------------------------------------------------
*/
const FILTER_ONCE = Object.freeze<ProjectionFilter>({
allowHydratedEvents: false,
allowOutdatedEvents: false,
});
const FILTER_CONTINUOUS = Object.freeze<ProjectionFilter>({
allowHydratedEvents: true,
allowOutdatedEvents: false,
});
const FILTER_ALL = Object.freeze<ProjectionFilter>({
allowHydratedEvents: true,
allowOutdatedEvents: true,
});
/*
|--------------------------------------------------------------------------------
| Projector
|--------------------------------------------------------------------------------
*/
/**
* Manages event projections by handling and distributing events to registered listeners.
*
* The `Projector` class is responsible for processing event records and invoking
* projection handlers based on predefined filters. It supports different projection
* patterns, including one-time projections, continuous projections, and catch-all projections.
* Additionally, it enables batched event processing for optimized handling of multiple events.
*
* @template TEventFactory - Event factory describing the event records processed by this projector.
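*
* @example
*
* A minimal sketch, mirroring this repository's tests; `EventStoreFactory` is
* assumed to be the event factory type exported with your event definitions:
*
* ```ts
* const projector = new Projector<EventStoreFactory>();
*
* projector.on("user:created", async (record) => {
*   // Project record.data onto a read model here.
* });
* ```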
*/
export class Projector<TEventFactory extends EventFactory = EventFactory> {
#listeners: ProjectorListeners<TEventFactory["$events"][number]["$record"]> = {};
#batchedListeners: BatchedProjectorListeners<TEventFactory["$events"][number]["$record"]> = {};
#queues: {
[stream: string]: Queue<ProjectorMessage<TEventFactory["$events"][number]["$record"]>>;
} = {};
constructor() {
this.push = this.push.bind(this);
}
#makeQueue(stream: string) {
this.#queues[stream] = new Queue(
async ({ record, status }) => {
return Promise.all(Array.from(this.#listeners[record.type as string] || []).map((fn) => fn(record, status)));
},
{
onDrained: () => {
delete this.#queues[stream];
},
},
);
}
/*
|--------------------------------------------------------------------------------
| Methods
|--------------------------------------------------------------------------------
*/
async push(record: TEventFactory["$events"][number]["$record"], status: ProjectionStatus): Promise<boolean> {
return new Promise<boolean>((resolve, reject) => {
if (this.#queues[record.stream] === undefined) {
this.#makeQueue(record.stream);
}
this.#queues[record.stream].push({ record, status }, resolve, reject);
});
}
async pushMany(key: string, records: TEventFactory["$events"][number]["$record"][]): Promise<void> {
await Promise.all(Array.from(this.#batchedListeners[key] || []).map((fn) => fn(records)));
}
/*
|--------------------------------------------------------------------------------
| Handlers
|--------------------------------------------------------------------------------
*/
/**
* Create a batched projection handler that takes in the list of events
* inserted under a specific batch key.
*
* @param key - Batch key being projected.
* @param handler - Handler method to execute when events are projected.
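*
* @example
*
* A minimal sketch; the `"import:users"` key is illustrative and is assumed
* to match the `batch` value used in the event insert settings:
*
* ```ts
* projector.batch("import:users", async (records) => {
*   // Handle all records inserted under the "import:users" batch key.
* });
* ```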
*/
batch(key: string, handler: BatchedProjectionHandler<TEventFactory["$events"][number]["$record"]>): Subscription {
const listeners = (this.#batchedListeners[key] ?? (this.#batchedListeners[key] = new Set())).add(handler);
return {
unsubscribe() {
listeners.delete(handler);
},
};
}
/**
* Create a single run projection handler.
*
* @remarks
*
* This method tells the projection that an event is only ever processed when
* it originates directly from the local event store. A useful pattern for
* when you want the event handler to submit data to a third-party service,
* such as sending an email or submitting third-party orders.
*
* We disallow `hydrate` and `outdated` as these represent events that have
* already been processed.
*
* @param type - Event type being projected.
* @param handler - Handler method to execute when event is projected.
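*
* @example
*
* A minimal sketch with the required side-effect handlers; the third-party
* submission is illustrative:
*
* ```ts
* projector.once("user:created", async (record) => {
*   // Submit record.data to a third-party service here.
* }, {
*   async onSuccess({ record }) {},
*   async onError({ error, record }) {},
* });
* ```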
*/
once<
TType extends TEventFactory["$events"][number]["$record"]["type"],
TRecord extends TEventFactory["$events"][number]["$record"] = Extract<TEventFactory["$events"][number]["$record"], { type: TType }>,
TSuccessData extends Record<string, any> | void = void,
>(
type: TType,
handler: ProjectionHandler<TRecord, TSuccessData>,
effects: TSuccessData extends void
? {
onError(res: { error: unknown; record: TRecord }): Promise<void>;
onSuccess(res: { record: TRecord }): Promise<void>;
}
: {
onError(res: { error: unknown; record: TRecord }): Promise<void>;
onSuccess(res: { data: TSuccessData; record: TRecord }): Promise<void>;
},
): Subscription {
return this.#subscribe(type, FILTER_ONCE, handler as any, effects);
}
/**
* Create a continuous projection handler.
*
* @remarks
*
* This method tells the projection to allow events directly from the event
* store as well as events coming through hydration via sync, manual, or
* automatic stream rehydration operations. This is the default pattern
* used for most events, and where you usually project the latest data to
* your read-side models and data stores.
*
* We allow `hydrate` events as they serve to keep the read side up to date
* with the latest events. We disallow `outdated` events as we do not want
* the latest data to be overridden by outdated data.
*
* NOTE! The nature of this pattern means that outdated events are never
* run by this projection. Make sure to handle `outdated` events if you
* have processing requirements that need to know about every event that
* has occurred in the event stream.
*
* @param type - Event type being projected.
* @param handler - Handler method to execute when event is projected.
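*
* @example
*
* A minimal sketch projecting the latest email onto a read model:
*
* ```ts
* projector.on("user:email-set", async (record) => {
*   // Write record.data to the read model here.
* });
* ```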
*/
on<
TType extends TEventFactory["$events"][number]["$record"]["type"],
TRecord extends TEventFactory["$events"][number]["$record"] = Extract<TEventFactory["$events"][number]["$record"], { type: TType }>,
>(type: TType, handler: ProjectionHandler<TRecord>): Subscription {
return this.#subscribe(type, FILTER_CONTINUOUS, handler as any);
}
/**
* Create a catch-all projection handler.
*
* @remarks
*
* This method is a catch-all for events that do not fall under the
* stricter definitions of the once and on patterns. This is a good place
* to deal with data that does not depend on a strict order of events.
*
* @param type - Event type being projected.
* @param handler - Handler method to execute when event is projected.
*/
all<
TType extends TEventFactory["$events"][number]["$record"]["type"],
TRecord extends TEventFactory["$events"][number]["$record"] = Extract<TEventFactory["$events"][number]["$record"], { type: TType }>,
>(type: TType, handler: ProjectionHandler<TRecord>): Subscription {
return this.#subscribe(type, FILTER_ALL, handler as any);
}
/*
|--------------------------------------------------------------------------------
| Helpers
|--------------------------------------------------------------------------------
*/
/**
* Create an event subscription against a given type with an assigned filter and handler.
*
* @param type - Event type to listen for.
* @param filter - Projection filter to validate against.
* @param handler - Handler to execute.
*/
#subscribe(
type: string,
filter: ProjectionFilter,
handler: ProjectionHandler<TEventFactory["$events"][number]["$record"]>,
effects?: {
onError(res: { error: unknown; record: TEventFactory["$events"][number]["$record"] }): Promise<void>;
onSuccess(res: { data?: unknown; record: TEventFactory["$events"][number]["$record"] }): Promise<void>;
},
): { unsubscribe: () => void } {
return {
unsubscribe: this.#addEventListener(type, async (record, state) => {
if (this.#hasValidState(filter, state)) {
await handler(record)
.then((data: unknown) => {
effects?.onSuccess({ data, record });
})
.catch((error) => {
if (effects !== undefined) {
effects.onError({ error, record });
} else {
throw error;
}
});
}
}),
};
}
/**
* Register a new event listener to handle incoming projection requests.
*
* @param type - Event type to listen for.
* @param fn - Listener fn to execute.
*/
#addEventListener(type: string, fn: ProjectorListenerFn<TEventFactory["$events"][number]["$record"]>): () => void {
const listeners = (this.#listeners[type] ?? (this.#listeners[type] = new Set())).add(fn);
return () => {
listeners.delete(fn);
};
}
/**
* Check if the projection filter is compatible with the provided state.
*
* @param filter - Projection filter to match against.
* @param state - Projection state to validate.
*/
#hasValidState(filter: ProjectionFilter, { hydrated, outdated }: ProjectionStatus) {
if (filter.allowHydratedEvents === false && hydrated === true) {
return false;
}
if (filter.allowOutdatedEvents === false && outdated === true) {
return false;
}
return true;
}
}

100
libraries/queue.ts Normal file
View File

@@ -0,0 +1,100 @@
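/**
* Minimal FIFO queue that runs messages through an async handler one at a
* time, resolving or rejecting the promise callbacks attached to each
* pushed message.
*
* @example
*
* A minimal usage sketch:
*
* ```ts
* const queue = new Queue<string>(async (message) => {
*   console.log(message);
* });
*
* await new Promise((resolve, reject) => {
*   queue.push("hello", resolve, reject);
* });
* ```
*/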
export class Queue<T> {
status: Status;
#queue: Message<T>[];
#handle: Handler<T>;
#hooks: Hooks;
constructor(handler: Handler<T>, hooks: Hooks = {}) {
this.status = "idle";
this.#queue = [];
this.#handle = handler;
this.#hooks = hooks;
}
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
is(status: Status): boolean {
return this.status === status;
}
push(message: T, resolve: MessagePromise["resolve"], reject: MessagePromise["reject"]): this {
this.#queue.push({ message, resolve, reject });
this.#process();
return this;
}
flush(filter?: Filter<Message<T>>): this {
if (filter) {
this.#queue = this.#queue.filter(filter);
} else {
this.#queue = [];
}
return this;
}
/*
|--------------------------------------------------------------------------------
| Processor
|--------------------------------------------------------------------------------
*/
async #process(): Promise<this> {
if (this.is("working")) {
return this;
}
this.#setStatus("working");
const job = this.#queue.shift();
if (!job) {
return this.#setStatus("drained");
}
this.#handle(job.message)
.then(job.resolve)
.catch(job.reject)
.finally(() => {
this.#setStatus("idle").#process();
});
return this;
}
#setStatus(value: Status): this {
this.status = value;
if (value === "drained") {
this.#hooks.onDrained?.();
}
return this;
}
}
/*
|--------------------------------------------------------------------------------
| Types
|--------------------------------------------------------------------------------
*/
type Status = "idle" | "working" | "drained";
type Handler<T> = (message: T) => Promise<any> | Promise<any[]>;
type Hooks = {
onDrained?: () => void;
};
type Message<T> = {
message: T;
} & MessagePromise;
type MessagePromise = {
resolve: (value: any) => void;
reject: (reason?: any) => void;
};
type Filter<T> = (job: T) => boolean;

90
libraries/reducer.ts Normal file
View File

@@ -0,0 +1,90 @@
import type { AggregateRoot } from "../libraries/aggregate.ts";
import type { Unknown } from "../types/common.ts";
import { EventFactory } from "./event-factory.ts";
/**
* Make an event reducer that produces an aggregate instance from resolved
* events.
*
* @param aggregate - Aggregate to instantiate and create an instance of.
*/
export function makeAggregateReducer<TEventFactory extends EventFactory, TAggregateRoot extends typeof AggregateRoot<TEventFactory>>(
aggregate: TAggregateRoot,
): Reducer<TEventFactory, InstanceType<TAggregateRoot>> {
return {
from(snapshot: Unknown) {
return aggregate.from(snapshot);
},
reduce(events: TEventFactory["$events"][number]["$record"][], snapshot?: Unknown) {
const instance = aggregate.from(snapshot);
for (const event of events) {
instance.with(event);
}
return instance;
},
};
}
/**
* Make an event reducer that produces a state based on resolved events.
*
* @param foldFn - Method which handles the event reduction.
* @param stateFn - Default state factory.
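*
* @example
*
* A minimal sketch, mirroring this repository's test reducers; the state
* shape is illustrative:
*
* ```ts
* const userReducer = makeReducer<EventStoreFactory, { email: string }>(
*   (state, event) => {
*     switch (event.type) {
*       case "user:email-set": {
*         state.email = event.data;
*         break;
*       }
*     }
*     return state;
*   },
*   () => ({ email: "" }),
* );
* ```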
*/
export function makeReducer<TEventFactory extends EventFactory, TState extends Unknown>(
foldFn: ReducerLeftFold<TState, TEventFactory>,
stateFn: ReducerState<TState>,
): Reducer<TEventFactory, TState> {
return {
from(snapshot: TState) {
return snapshot;
},
reduce(events: TEventFactory["$events"][number]["$record"][], snapshot?: TState) {
return events.reduce(foldFn, snapshot ?? (stateFn() as TState));
},
};
}
export type Reducer<TEventFactory extends EventFactory = EventFactory, TState extends Record<string, unknown> | AggregateRoot<TEventFactory> = any> = {
/**
* Return result directly from a snapshot that does not have any subsequent
* events to fold onto a state.
*
* @param snapshot - Snapshot of a reducer state.
*/
from(snapshot: Unknown): TState;
/**
* Take in a list of events, and return a state from the given events.
*
* @param events - Events to reduce.
* @param snapshot - Initial snapshot state to apply to the reducer.
*/
reduce(events: TEventFactory["$events"][number]["$record"][], snapshot?: Unknown): TState;
};
/**
* Take an event, and fold it onto the given state.
*
* @param state - State to fold onto.
* @param event - Event to fold from.
*
* @example
* ```ts
* const events = [...events];
* const state = events.reduce((state, event) => {
* state.foo = event.data.foo;
* return state;
* }, {
* foo: ""
* })
* ```
*/
export type ReducerLeftFold<TState extends Record<string, unknown> = any, TEventFactory extends EventFactory = EventFactory> = (
state: TState,
event: TEventFactory["$events"][number]["$record"],
) => TState;
export type ReducerState<TState extends Unknown> = () => TState;
export type InferReducerState<TReducer> = TReducer extends Reducer<infer _, infer TState> ? TState : never;

40
libraries/time.ts Normal file
View File

@@ -0,0 +1,40 @@
import { HLC } from "./hlc.ts";
import { Timestamp } from "./timestamp.ts";
const clock = new HLC();
/**
* Get a date object from a given event meta timestamp.
*
* @param timestamp - Event meta timestamp.
*/
export function getDate(timestamp: string): Date {
return new Date(getUnixTimestamp(timestamp));
}
/**
* Get logical timestamp based on current time.
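*
* @example
*
* The returned value joins a radix-36 encoded wall time with a zero-padded
* logical counter (the value shown is illustrative):
*
* ```ts
* const ts = getLogicalTimestamp(); // e.g. "mbn3k2xq-00000"
* ```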
*/
export function getLogicalTimestamp(): string {
const ts = clock.now().toJSON();
return `${ts.time}-${String(ts.logical).padStart(5, "0")}`;
}
/**
* Get timestamp instance from provided logical timestamp.
*
* @param ts - Logical timestamp to convert.
*/
export function getTimestamp(ts: string): Timestamp {
const [time, logical] = ts.split("-");
return new Timestamp(time, Number(logical));
}
/**
* Get unix timestamp value from provided logical timestamp.
*
* @param ts - Logical timestamp to convert.
*/
export function getUnixTimestamp(ts: string): number {
return getTimestamp(ts).time;
}

49
libraries/timestamp.ts Normal file
View File

@@ -0,0 +1,49 @@
export const RADIX = 36;
export class Timestamp {
readonly time: number;
readonly logical: number;
constructor(time: TimeLike, logical = 0) {
this.time = typeof time === "string" ? parseInt(time, RADIX) : time;
this.logical = logical;
}
static bigger(a: Timestamp, b: Timestamp): Timestamp {
return a.compare(b) === -1 ? b : a;
}
encode(): string {
return this.time.toString(RADIX);
}
compare(other: Timestamp): 1 | 0 | -1 {
if (this.time > other.time) {
return 1;
}
if (this.time < other.time) {
return -1;
}
if (this.logical > other.logical) {
return 1;
}
if (this.logical < other.logical) {
return -1;
}
return 0;
}
toJSON(): TimestampJSON {
return Object.freeze({
time: this.encode(),
logical: this.logical,
});
}
}
export type TimeLike = string | number;
type TimestampJSON = {
readonly time: string;
readonly logical: number;
};

33
libraries/zod.ts Normal file
View File

@@ -0,0 +1,33 @@
import { ZodError } from "zod";
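/**
* Convert a ZodError into human-readable lines, sorted by path depth, with an
* optional left padding applied to each line.
*
* @param error - Zod error to convert.
* @param padding - Number of spaces to prefix each line with. Default: 0
*/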
export function toPrettyErrorLines(error: ZodError, padding: number = 0): string[] {
const lines: string[] = [];
const margin = " ".repeat(padding);
const issues = [...error.issues].sort((a, b) => a.path.length - b.path.length);
for (const issue of issues) {
lines.push(`${margin}${issue.message}`);
if (issue.path?.length) {
lines.push(`${margin} → at ${toDotPath(issue.path)}`);
}
}
return lines;
}
function toDotPath(path: (string | number | symbol)[]): string {
const segs: string[] = [];
for (const seg of path) {
if (typeof seg === "number") {
segs.push(`[${seg}]`);
} else if (typeof seg === "symbol") {
segs.push(`["${String(seg)}"]`);
} else if (seg.includes(".")) {
segs.push(`["${seg}"]`);
} else {
if (segs.length) {
segs.push(".");
}
segs.push(seg);
}
}
return segs.join("");
}

15
mod.ts Normal file
View File

@@ -0,0 +1,15 @@
export * from "./libraries/aggregate.ts";
export * from "./libraries/errors.ts";
export * from "./libraries/event.ts";
export * from "./libraries/event-store.ts";
export * from "./libraries/nanoid.ts";
export * from "./libraries/projector.ts";
export * from "./libraries/queue.ts";
export * from "./libraries/reducer.ts";
export * from "./libraries/time.ts";
export * from "./libraries/timestamp.ts";
export type * from "./types/adapter.ts";
export type * from "./types/common.ts";
export type * from "./types/projector.ts";
export type * from "./types/query.ts";
export type * from "./types/utilities.ts";

20
package.json Normal file
View File

@@ -0,0 +1,20 @@
{
"dependencies": {
"@std/async": "npm:@jsr/std__async@1.0.12",
"@valkyr/db": "1.0.1",
"mongodb": "6.15.0",
"nanoid": "5.1.5",
"postgres": "3.4.5",
"zod": "next"
},
"devDependencies": {
"@std/assert": "npm:@jsr/std__assert@1.0.12",
"@std/testing": "npm:@jsr/std__testing@1.0.11",
"@valkyr/testcontainers": "npm:@jsr/valkyr__testcontainers@2.0.0",
"eslint": "9.24.0",
"eslint-plugin-simple-import-sort": "12.1.1",
"fake-indexeddb": "6.0.0",
"prettier": "3.5.3",
"typescript-eslint": "8.30.1"
}
}

View File

@@ -0,0 +1,81 @@
import "fake-indexeddb/auto";
import { delay } from "@std/async";
import { afterAll, describe } from "@std/testing/bdd";
import { BrowserAdapter } from "../../adapters/browser/adapter.ts";
import { EventStore, EventStoreHooks } from "../../libraries/event-store.ts";
import { Projector } from "../../libraries/projector.ts";
import { aggregates } from "./mocks/aggregates.ts";
import { events, EventStoreFactory } from "./mocks/events.ts";
import testAddEvent from "./store/add-event.ts";
import testCreateSnapshot from "./store/create-snapshot.ts";
import testMakeAggregateReducer from "./store/make-aggregate-reducer.ts";
import testMakeReducer from "./store/make-reducer.ts";
import testOnceProjection from "./store/once-projection.ts";
import testPushAggregate from "./store/push-aggregate.ts";
import testPushManyAggregates from "./store/push-many-aggregates.ts";
import testReduce from "./store/reduce.ts";
import testReplayEvents from "./store/replay-events.ts";
const eventStoreFn = async (options: { hooks?: EventStoreHooks<EventStoreFactory> } = {}) => getEventStore(options);
/*
|--------------------------------------------------------------------------------
| Lifecycle
|--------------------------------------------------------------------------------
*/
afterAll(async () => {
await delay(250);
});
/*
|--------------------------------------------------------------------------------
| Tests
|--------------------------------------------------------------------------------
*/
describe("Adapter > Browser (IndexedDb)", () => {
testAddEvent(eventStoreFn);
testCreateSnapshot(eventStoreFn);
testMakeReducer(eventStoreFn);
testMakeAggregateReducer(eventStoreFn);
testReplayEvents(eventStoreFn);
testReduce(eventStoreFn);
testOnceProjection(eventStoreFn);
testPushAggregate(eventStoreFn);
testPushManyAggregates(eventStoreFn);
});
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
function getEventStore({ hooks = {} }: { hooks?: EventStoreHooks<EventStoreFactory> }) {
const store = new EventStore({
adapter: new BrowserAdapter("indexeddb"),
events,
aggregates,
hooks,
});
const projector = new Projector<EventStoreFactory>();
if (hooks.onEventsInserted === undefined) {
store.onEventsInserted(async (records, { batch }) => {
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
}
return { store, projector };
}

View File

@@ -0,0 +1,70 @@
import "fake-indexeddb/auto";
import { describe } from "@std/testing/bdd";
import { BrowserAdapter } from "../../adapters/browser/adapter.ts";
import { EventStore, EventStoreHooks } from "../../libraries/event-store.ts";
import { Projector } from "../../libraries/projector.ts";
import { aggregates } from "./mocks/aggregates.ts";
import { events, EventStoreFactory } from "./mocks/events.ts";
import testAddEvent from "./store/add-event.ts";
import testCreateSnapshot from "./store/create-snapshot.ts";
import testMakeAggregateReducer from "./store/make-aggregate-reducer.ts";
import testMakeReducer from "./store/make-reducer.ts";
import testOnceProjection from "./store/once-projection.ts";
import testPushAggregate from "./store/push-aggregate.ts";
import testPushManyAggregates from "./store/push-many-aggregates.ts";
import testReduce from "./store/reduce.ts";
import testReplayEvents from "./store/replay-events.ts";
const eventStoreFn = async (options: { hooks?: EventStoreHooks<EventStoreFactory> } = {}) => getEventStore(options);
/*
|--------------------------------------------------------------------------------
| Tests
|--------------------------------------------------------------------------------
*/
describe("Adapter > Browser (memory)", () => {
testAddEvent(eventStoreFn);
testCreateSnapshot(eventStoreFn);
testMakeReducer(eventStoreFn);
testMakeAggregateReducer(eventStoreFn);
testReplayEvents(eventStoreFn);
testReduce(eventStoreFn);
testOnceProjection(eventStoreFn);
testPushAggregate(eventStoreFn);
testPushManyAggregates(eventStoreFn);
});
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
function getEventStore({ hooks = {} }: { hooks?: EventStoreHooks<EventStoreFactory> }) {
const store = new EventStore({
adapter: new BrowserAdapter("memorydb"),
events,
aggregates,
hooks,
});
const projector = new Projector<EventStoreFactory>();
if (hooks.onEventsInserted === undefined) {
store.onEventsInserted(async (records, { batch }) => {
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
}
return { store, projector };
}

View File

@@ -0,0 +1,134 @@
import { AggregateRoot } from "../../../libraries/aggregate.ts";
import { AggregateFactory } from "../../../libraries/aggregate-factory.ts";
import { makeId } from "../../../libraries/nanoid.ts";
import { makeAggregateReducer } from "../../../libraries/reducer.ts";
import { EventStoreFactory } from "./events.ts";
export class User extends AggregateRoot<EventStoreFactory> {
static override readonly name = "user";
id: string = "";
name: Name = {
given: "",
family: "",
};
email: string = "";
active: boolean = true;
posts: UserPosts = {
list: [],
count: 0,
};
// -------------------------------------------------------------------------
// Factories
// -------------------------------------------------------------------------
static reducer = makeAggregateReducer(User);
static create(name: Name, email: string): User {
const user = new User();
user.push({
type: "user:created",
stream: makeId(),
data: { name, email },
meta: { auditor: "foo" },
});
return user;
}
static async getById(userId: string): Promise<User | undefined> {
return this.$store.reduce({ name: "user", stream: userId, reducer: this.reducer });
}
// -------------------------------------------------------------------------
// Reducer
// -------------------------------------------------------------------------
with(event: EventStoreFactory["$events"][number]["$record"]) {
switch (event.type) {
case "user:created": {
this.id = event.stream;
this.name.given = event.data.name?.given ?? "";
this.name.family = event.data.name?.family ?? "";
this.email = event.data.email;
break;
}
case "user:name:given-set": {
this.name.given = event.data;
break;
}
case "user:name:family-set": {
this.name.family = event.data;
break;
}
case "user:email-set": {
this.email = event.data;
break;
}
case "user:activated": {
this.active = true;
break;
}
case "user:deactivated": {
this.active = false;
break;
}
}
}
// -------------------------------------------------------------------------
// Actions
// -------------------------------------------------------------------------
setGivenName(given: string): this {
return this.push({
type: "user:name:given-set",
stream: this.id,
data: given,
meta: { auditor: "foo" },
});
}
setFamilyName(family: string): this {
return this.push({
type: "user:name:family-set",
stream: this.id,
data: family,
meta: { auditor: "foo" },
});
}
setEmail(email: string, auditor: string): this {
return this.push({
type: "user:email-set",
stream: this.id,
data: email,
meta: { auditor },
});
}
async snapshot(): Promise<this> {
await this.$store.createSnapshot({ name: "user", stream: this.id, reducer: User.reducer });
return this;
}
// -------------------------------------------------------------------------
// Helpers
// -------------------------------------------------------------------------
fullName(): string {
return `${this.name.given} ${this.name.family}`;
}
}
export const aggregates = new AggregateFactory([User]);
type Name = {
given: string;
family: string;
};
type UserPosts = {
list: string[];
count: number;
};

View File

@@ -0,0 +1,19 @@
export abstract class ServiceError<TData = unknown> extends Error {
constructor(message: string, readonly status: number, readonly data?: TData) {
super(message);
}
toJSON() {
return {
status: this.status,
message: this.message,
data: this.data,
};
}
}
export class CustomServiceError<TData = unknown> extends ServiceError<TData> {
constructor(message = "Custom Error", data?: TData) {
super(message, 400, data);
}
}

View File

@@ -0,0 +1,32 @@
import z from "zod";
import { event } from "../../../libraries/event.ts";
import { EventFactory } from "../../../libraries/event-factory.ts";
export const auditor = z.strictObject({ auditor: z.string() });
export const events = new EventFactory([
event
.type("user:created")
.data(
z.strictObject({
name: z
.union([z.strictObject({ given: z.string(), family: z.string().optional() }), z.strictObject({ given: z.string().optional(), family: z.string() })])
.optional(),
email: z.string(),
}),
)
.meta(auditor),
event.type("user:name:given-set").data(z.string()).meta(auditor),
event.type("user:name:family-set").data(z.string()).meta(auditor),
event.type("user:email-set").data(z.email()).meta(auditor),
event.type("user:activated").meta(auditor),
event.type("user:deactivated").meta(auditor),
event
.type("post:created")
.data(z.strictObject({ title: z.string(), body: z.string() }))
.meta(auditor),
event.type("post:removed").meta(auditor),
]);
export type EventStoreFactory = typeof events;

View File

@@ -0,0 +1,32 @@
import { makeReducer } from "../../../libraries/reducer.ts";
import { EventStoreFactory } from "./events.ts";
export const userPostReducer = makeReducer<EventStoreFactory, UserPostState>(
(state, event) => {
switch (event.type) {
case "post:created": {
state.posts.push({ id: event.stream, author: event.meta.auditor });
state.count += 1;
break;
}
case "post:removed": {
state.posts = state.posts.filter(({ id }) => id !== event.stream);
state.count -= 1;
break;
}
}
return state;
},
() => ({
posts: [],
count: 0,
}),
);
type UserPostState = {
posts: {
id: string;
author: string;
}[];
count: number;
};

View File

@@ -0,0 +1,61 @@
import { makeReducer } from "../../../libraries/reducer.ts";
import { EventStoreFactory } from "./events.ts";
export const userReducer = makeReducer<EventStoreFactory, UserState>(
(state, event) => {
switch (event.type) {
case "user:created": {
state.name.given = event.data.name?.given ?? "";
state.name.family = event.data.name?.family ?? "";
state.email = event.data.email;
break;
}
case "user:name:given-set": {
state.name.given = event.data;
break;
}
case "user:name:family-set": {
state.name.family = event.data;
break;
}
case "user:email-set": {
state.email = event.data;
break;
}
case "user:activated": {
state.active = true;
break;
}
case "user:deactivated": {
state.active = false;
break;
}
}
return state;
},
() => ({
name: {
given: "",
family: "",
},
email: "",
active: true,
posts: {
list: [],
count: 0,
},
}),
);
type UserState = {
name: {
given: string;
family: string;
};
email: string;
active: boolean;
posts: {
list: string[];
count: number;
};
};

View File

@@ -0,0 +1,99 @@
import { afterAll, afterEach, beforeAll, describe } from "@std/testing/bdd";
import { MongoTestContainer } from "@valkyr/testcontainers/mongodb";
import { MongoAdapter, register } from "../../adapters/mongo/adapter.ts";
import { EventStore, EventStoreHooks } from "../../libraries/event-store.ts";
import { Projector } from "../../libraries/projector.ts";
import { aggregates } from "./mocks/aggregates.ts";
import { events, EventStoreFactory } from "./mocks/events.ts";
import testAddEvent from "./store/add-event.ts";
import testAddManyEvents from "./store/add-many-events.ts";
import testCreateSnapshot from "./store/create-snapshot.ts";
import testMakeAggregateReducer from "./store/make-aggregate-reducer.ts";
import testMakeEvent from "./store/make-event.ts";
import testMakeReducer from "./store/make-reducer.ts";
import testOnceProjection from "./store/once-projection.ts";
import testRelationsProvider from "./store/providers/relations.ts";
import testPushAggregate from "./store/push-aggregate.ts";
import testPushManyAggregates from "./store/push-many-aggregates.ts";
import testReduce from "./store/reduce.ts";
import testReplayEvents from "./store/replay-events.ts";
const DB_NAME = "sandbox";
const container = await MongoTestContainer.start();
const eventStoreFn = async (options: { hooks?: EventStoreHooks<EventStoreFactory> } = {}) => getEventStore(options);
/*
|--------------------------------------------------------------------------------
| Database
|--------------------------------------------------------------------------------
*/
beforeAll(async () => {
const db = container.client.db(DB_NAME);
await register(db, console.info);
});
afterEach(async () => {
const db = container.client.db(DB_NAME);
await Promise.all([db.collection("events").deleteMany({}), db.collection("relations").deleteMany({}), db.collection("snapshots").deleteMany({})]);
});
afterAll(async () => {
await container.stop();
});
/*
|--------------------------------------------------------------------------------
| Tests
|--------------------------------------------------------------------------------
*/
describe("Adapter > MongoDb", () => {
testRelationsProvider(eventStoreFn);
testAddEvent(eventStoreFn);
testAddManyEvents(eventStoreFn);
testCreateSnapshot(eventStoreFn);
testMakeEvent(eventStoreFn);
testMakeReducer(eventStoreFn);
testMakeAggregateReducer(eventStoreFn);
testReplayEvents(eventStoreFn);
testReduce(eventStoreFn);
testOnceProjection(eventStoreFn);
testPushAggregate(eventStoreFn);
testPushManyAggregates(eventStoreFn);
});
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
async function getEventStore({ hooks = {} }: { hooks?: EventStoreHooks<EventStoreFactory> }) {
const store = new EventStore({
adapter: new MongoAdapter(() => container.client, DB_NAME),
events,
aggregates,
hooks,
});
const projector = new Projector<EventStoreFactory>();
if (hooks.onEventsInserted === undefined) {
store.onEventsInserted(async (records, { batch }) => {
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
}
return { store, projector };
}

View File

@@ -0,0 +1,137 @@
import { afterAll, afterEach, beforeAll, describe } from "@std/testing/bdd";
import { PostgresTestContainer } from "@valkyr/testcontainers/postgres";
import postgres from "postgres";
import { PostgresAdapter } from "../../adapters/postgres/adapter.ts";
import type { PostgresConnection } from "../../adapters/postgres/connection.ts";
import { EventStore, type EventStoreHooks } from "../../libraries/event-store.ts";
import { Projector } from "../../libraries/projector.ts";
import { aggregates } from "./mocks/aggregates.ts";
import { events, EventStoreFactory } from "./mocks/events.ts";
import testAddEvent from "./store/add-event.ts";
import testAddManyEvents from "./store/add-many-events.ts";
import testCreateSnapshot from "./store/create-snapshot.ts";
import testMakeAggregateReducer from "./store/make-aggregate-reducer.ts";
import testMakeEvent from "./store/make-event.ts";
import testMakeReducer from "./store/make-reducer.ts";
import testOnceProjection from "./store/once-projection.ts";
import testRelationsProvider from "./store/providers/relations.ts";
import testPushAggregate from "./store/push-aggregate.ts";
import testPushManyAggregates from "./store/push-many-aggregates.ts";
import testReduce from "./store/reduce.ts";
import testReplayEvents from "./store/replay-events.ts";
const DB_NAME = "sandbox";
const container = await PostgresTestContainer.start("postgres:17");
const sql = postgres(container.url(DB_NAME));
const eventStoreFn = async (options: { hooks?: EventStoreHooks<EventStoreFactory> } = {}) => getEventStore(sql, options);
/*
|--------------------------------------------------------------------------------
| Database
|--------------------------------------------------------------------------------
*/
beforeAll(async () => {
await container.create(DB_NAME);
await sql`CREATE SCHEMA "event_store"`;
await sql`
CREATE TABLE IF NOT EXISTS "event_store"."events" (
"id" varchar PRIMARY KEY NOT NULL,
"stream" varchar NOT NULL,
"type" varchar NOT NULL,
"data" jsonb NOT NULL,
"meta" jsonb NOT NULL,
"recorded" varchar NOT NULL,
"created" varchar NOT NULL
)
`;
await sql`
CREATE TABLE IF NOT EXISTS "event_store"."relations" (
"id" serial PRIMARY KEY NOT NULL,
"key" varchar NOT NULL,
"stream" varchar NOT NULL,
UNIQUE ("key", "stream")
)
`;
await sql`
CREATE TABLE IF NOT EXISTS "event_store"."snapshots" (
"id" serial PRIMARY KEY NOT NULL,
"name" varchar NOT NULL,
"stream" varchar NOT NULL,
"cursor" varchar NOT NULL,
"state" jsonb NOT NULL,
UNIQUE ("name", "stream")
)
`;
await sql`CREATE INDEX IF NOT EXISTS "relations_key_index" ON "event_store"."relations" USING btree ("key")`;
await sql`CREATE INDEX IF NOT EXISTS "relations_stream_index" ON "event_store"."relations" USING btree ("stream")`;
await sql`CREATE INDEX IF NOT EXISTS "events_stream_index" ON "event_store"."events" USING btree ("stream")`;
await sql`CREATE INDEX IF NOT EXISTS "events_type_index" ON "event_store"."events" USING btree ("type")`;
await sql`CREATE INDEX IF NOT EXISTS "events_recorded_index" ON "event_store"."events" USING btree ("recorded")`;
await sql`CREATE INDEX IF NOT EXISTS "events_created_index" ON "event_store"."events" USING btree ("created")`;
await sql`CREATE INDEX IF NOT EXISTS "snapshots_name_stream_cursor_index" ON "event_store"."snapshots" USING btree ("name","stream","cursor")`;
});
afterEach(async () => {
await container.client(DB_NAME)`TRUNCATE "event_store"."relations","event_store"."events","event_store"."snapshots" CASCADE`;
});
afterAll(async () => {
await container.stop();
});
/*
|--------------------------------------------------------------------------------
| Tests
|--------------------------------------------------------------------------------
*/
describe("Adapter > Postgres", () => {
testRelationsProvider(eventStoreFn);
testAddEvent(eventStoreFn);
testAddManyEvents(eventStoreFn);
testCreateSnapshot(eventStoreFn);
testMakeEvent(eventStoreFn);
testMakeReducer(eventStoreFn);
testMakeAggregateReducer(eventStoreFn);
testReplayEvents(eventStoreFn);
testReduce(eventStoreFn);
testOnceProjection(eventStoreFn);
testPushAggregate(eventStoreFn);
testPushManyAggregates(eventStoreFn);
});
/*
|--------------------------------------------------------------------------------
| Utilities
|--------------------------------------------------------------------------------
*/
async function getEventStore(connection: PostgresConnection, { hooks = {} }: { hooks?: EventStoreHooks<EventStoreFactory> }) {
const store = new EventStore({
adapter: new PostgresAdapter(connection, { schema: "event_store" }),
events,
aggregates,
hooks,
});
const projector = new Projector<EventStoreFactory>();
if (hooks.onEventsInserted === undefined) {
store.onEventsInserted(async (records, { batch }) => {
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
}
return { store, projector };
}

View File

@@ -0,0 +1,220 @@
import { assertEquals, assertObjectMatch, assertRejects } from "@std/assert";
import { it } from "@std/testing/bdd";
import { EventInsertionError, EventValidationError } from "../../../libraries/errors.ts";
import { makeId } from "../../../libraries/nanoid.ts";
import type { EventStoreFactory } from "../mocks/events.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".addEvent", (getEventStore) => {
it("should throw a 'EventValidationError' when providing bad event data", async () => {
const { store } = await getEventStore();
await assertRejects(
async () =>
store.pushEvent(
store.event({
type: "user:created",
data: {
name: {
given: "John",
familys: "Doe",
},
email: "john.doe@fixture.none",
},
} as any),
),
EventValidationError,
);
});
it("should throw a 'EventInsertionError' on event insertion error", async () => {
const { store } = await getEventStore();
store.events.insert = async () => {
throw new Error("Fake Insert Error");
};
await assertRejects(
async () =>
store.pushEvent(
store.event({
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: { auditor: "foo" },
}),
),
EventInsertionError,
new EventInsertionError().message,
);
});
it("should insert and project 'user:created' event", async () => {
const { store, projector } = await getEventStore();
const stream = makeId();
const event = store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: { auditor: "foo" },
});
let projectedResult: string = "";
projector.on("user:created", async (record) => {
projectedResult = `${record.data.name?.given} ${record.data.name?.family} | ${record.data.email}`;
});
await store.pushEvent(event);
assertObjectMatch(await store.events.getByStream(stream).then((rows: any) => rows[0]), event);
assertEquals(projectedResult, "John Doe | john.doe@fixture.none");
});
it("should insert 'user:created' and ignore 'project' error", async () => {
const { store, projector } = await getEventStore({
hooks: {
async onError() {
// ...
},
},
});
const stream = makeId();
const event = store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "admin",
},
});
projector.on("user:created", async () => {
throw new Error();
});
await store.pushEvent(event);
assertObjectMatch(await store.events.getByStream(stream).then((rows: any) => rows[0]), event);
});
it("should insert 'user:created' and add it to 'tenant:xyz' relation", async () => {
const { store, projector } = await getEventStore();
const key = `tenant:${makeId()}`;
projector.on("user:created", async ({ stream }) => {
await store.relations.insert(key, stream);
});
await store.pushEvent(
store.event({
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
);
const res1 = await store.getEventsByRelations([key]);
assertEquals(res1.length, 1);
await store.pushEvent(
store.event({
type: "user:created",
data: {
name: {
given: "Jane",
family: "Doe",
},
email: "jane.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
);
const res2 = await store.getEventsByRelations([key]);
assertEquals(res2.length, 2);
});
it("should insert 'user:email-set' and remove it from 'tenant:xyz' relations", async () => {
const { store, projector } = await getEventStore();
const key = `tenant:${makeId()}`;
projector.on("user:created", async ({ stream }) => {
await store.relations.insert(key, stream);
});
projector.on("user:email-set", async ({ stream }) => {
await store.relations.remove(key, stream);
});
await store.pushEvent(
store.event({
stream: "user-1",
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
);
const res1 = await store.getEventsByRelations([key]);
assertEquals(res1.length, 1);
await store.pushEvent(
store.event({
stream: "user-1",
type: "user:email-set",
data: "jane.doe@fixture.none",
meta: {
auditor: "super",
},
}),
);
const res2 = await store.getEventsByRelations([key]);
assertEquals(res2.length, 0);
});
});

View File

@@ -0,0 +1,108 @@
import { assertEquals, assertObjectMatch, assertRejects } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import { EventValidationError } from "../../../mod.ts";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".addSequence", (getEventStore) => {
it("should insert 'user:created', 'user:name:given-set', and 'user:email-set' in a sequence of events", async () => {
const { store } = await getEventStore();
const stream = nanoid();
const events = [
store.event({
stream,
type: "user:created",
data: {
name: {
given: "Jane",
family: "Doe",
},
email: "jane.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
store.event({
stream,
type: "user:name:given-set",
data: "John",
meta: {
auditor: "admin",
},
}),
store.event({
stream,
type: "user:email-set",
data: "john@doe.com",
meta: {
auditor: "admin",
},
}),
];
await store.pushManyEvents(events);
const records = await store.getEventsByStreams([stream]);
assertEquals(records.length, 3);
records.forEach((record, index) => {
assertObjectMatch(record, events[index]);
});
const state = await store.reduce({ name: "user", stream, reducer: userReducer });
assertEquals(state?.name.given, "John");
assertEquals(state?.email, "john@doe.com");
});
it("should not commit any events when insert fails", async () => {
const { store } = await getEventStore();
const stream = nanoid();
await assertRejects(
async () =>
store.pushManyEvents([
store.event({
stream,
type: "user:created",
data: {
name: {
given: "Jane",
family: "Doe",
},
email: "jane.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
store.event({
stream,
type: "user:name:given-set",
data: {
givens: "John",
},
} as any),
store.event({
stream,
type: "user:email-set",
data: "john@doe.com",
meta: {
auditor: "admin",
},
}),
]),
EventValidationError,
);
const records = await store.getEventsByStreams([stream]);
assertEquals(records.length, 0);
});
});

View File

@@ -0,0 +1,91 @@
import { assertEquals, assertNotEquals, assertObjectMatch } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".createSnapshot", (getEventStore) => {
it("should create a new snapshot", async () => {
const { store } = await getEventStore();
const stream = nanoid();
await store.pushEvent(
store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "super",
},
}),
);
await store.pushEvent(
store.event({
stream,
type: "user:email-set",
data: "jane.doe@fixture.none",
meta: {
auditor: "super",
},
}),
);
await store.pushEvent(
store.event({
stream,
type: "user:deactivated",
meta: {
auditor: "super",
},
}),
);
await store.createSnapshot({ name: "user", stream, reducer: userReducer });
const snapshot = await store.snapshots.getByStream("user", stream);
assertNotEquals(snapshot, undefined);
assertObjectMatch(snapshot!.state, {
name: {
given: "John",
family: "Doe",
},
email: "jane.doe@fixture.none",
active: false,
});
await store.pushEvent(
store.event({
stream,
type: "user:activated",
meta: {
auditor: "super",
},
}),
);
const events = await store.events.getByStream(stream, { cursor: snapshot!.cursor });
assertEquals(events.length, 1);
const state = await store.reduce({ name: "user", stream, reducer: userReducer });
assertObjectMatch(state!, {
name: {
given: "John",
family: "Doe",
},
email: "jane.doe@fixture.none",
active: true,
});
});
});

View File

@@ -0,0 +1,25 @@
import { assertEquals } from "@std/assert";
import { it } from "@std/testing/bdd";
import type { EventStoreFactory } from "../mocks/events.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".makeAggregateReducer", (getEventStore) => {
it("should reduce a user", async () => {
const { store } = await getEventStore();
    const userA = await store
      .aggregate("user")
      .create({ given: "John", family: "Doe" }, "john.doe@fixture.none")
      .setGivenName("Jane")
      .save();
await userA.snapshot();
await userA.setFamilyName("Smith").setEmail("jane.smith@fixture.none", "system").save();
const userB = await store.aggregate("user").getById(userA.id);
if (userB === undefined) {
throw new Error("Expected user to exist");
}
assertEquals(userB.fullName(), "Jane Smith");
assertEquals(userB.email, "jane.smith@fixture.none");
});
});


@@ -0,0 +1,89 @@
import { assertEquals, assertLess } from "@std/assert";
import { it } from "@std/testing/bdd";
import type { RelationPayload } from "../../../types/adapter.ts";
import type { EventStoreFactory } from "../mocks/events.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".makeEvent", (getEventStore) => {
it("should make and performantly batch insert a list of events directly", async () => {
const { store } = await getEventStore();
const eventsToInsert = [];
const t0 = performance.now();
let count = 10_000;
while (count--) {
eventsToInsert.push(
store.event({
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "system",
},
}),
);
}
const t1 = performance.now();
assertLess((t1 - t0) / 1000, 5);
const t3 = performance.now();
await store.events.insertMany(eventsToInsert);
const t4 = performance.now();
assertLess((t4 - t3) / 1000, 5);
const events = await store.getEvents();
assertEquals(events.length, 10_000);
});
it("should performantly create and remove event relations", async () => {
const { store } = await getEventStore();
const relations: RelationPayload[] = [];
let count = 10_000;
while (count--) {
const event = store.event({
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "system",
},
});
      relations.push({ key: "test:xyz", stream: event.stream });
}
const t0 = performance.now();
await store.relations.insertMany(relations);
const tr0 = (performance.now() - t0) / 1000;
    assertEquals((await store.relations.getByKey("test:xyz")).length, 10_000);
assertLess(tr0, 5);
const t1 = performance.now();
await store.relations.removeMany(relations);
const tr1 = (performance.now() - t1) / 1000;
    assertEquals((await store.relations.getByKey("test:xyz")).length, 0);
assertLess(tr1, 10);
});
});


@@ -0,0 +1,120 @@
import { assertEquals } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import { makeId } from "../../../libraries/nanoid.ts";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userPostReducer } from "../mocks/user-posts-reducer.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".makeReducer", (getEventStore) => {
it("should create a 'user' reducer and only reduce filtered events", async () => {
const { store } = await getEventStore();
const streamA = nanoid();
const streamB = nanoid();
await store.pushEvent(
store.event({
stream: streamA,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "system",
},
}),
);
await store.pushEvent(
store.event({
stream: streamB,
type: "user:created",
data: {
name: {
given: "Peter",
family: "Parker",
},
email: "peter.parker@fixture.none",
},
meta: {
auditor: "system",
},
}),
);
await store.pushEvent(
store.event({
stream: streamA,
type: "user:name:given-set",
data: "Jane",
meta: {
auditor: "system",
},
}),
);
await store.pushEvent(
store.event({
stream: streamA,
type: "user:email-set",
data: "jane.doe@fixture.none",
meta: {
auditor: "system",
},
}),
);
await store.pushEvent(
store.event({
stream: streamB,
type: "user:email-set",
data: "spiderman@fixture.none",
meta: {
auditor: "system",
},
}),
);
const state = await store.reduce({ name: "user", stream: streamA, reducer: userReducer, filter: { types: ["user:created", "user:email-set"] } });
assertEquals(state?.name, { given: "John", family: "Doe" });
assertEquals(state?.email, "jane.doe@fixture.none");
});
it("should create a 'post:count' reducer and retrieve post correct post count", async () => {
const { store, projector } = await getEventStore();
const auditor = nanoid();
projector.on("post:created", async ({ stream, meta: { auditor } }) => {
await store.relations.insert(`user:${auditor}:posts`, stream);
});
const post1 = makeId();
const post2 = makeId();
const post3 = makeId();
await store.pushEvent(store.event({ stream: post1, type: "post:created", data: { title: "Post #1", body: "Sample #1" }, meta: { auditor } }));
await store.pushEvent(store.event({ stream: post2, type: "post:created", data: { title: "Post #2", body: "Sample #2" }, meta: { auditor } }));
await store.pushEvent(store.event({ stream: post2, type: "post:removed", meta: { auditor } }));
await store.pushEvent(store.event({ stream: post3, type: "post:created", data: { title: "Post #3", body: "Sample #3" }, meta: { auditor } }));
const events = await store.getEventsByRelations([`user:${auditor}:posts`]);
assertEquals(events.length, 4);
const state = await store.reduce({ name: "user", relation: `user:${auditor}:posts`, reducer: userPostReducer });
assertEquals(state?.posts, [
{ id: post1, author: auditor },
{ id: post3, author: auditor },
]);
assertEquals(state?.count, 2);
});
});


@@ -0,0 +1,94 @@
import { assertEquals, assertObjectMatch } from "@std/assert";
import { it } from "@std/testing/bdd";
import { makeId } from "../../../libraries/nanoid.ts";
import type { EventStoreFactory } from "../mocks/events.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>("projector.once", (getEventStore) => {
it("should handle successfull projection", async () => {
const { store, projector } = await getEventStore();
const stream = makeId();
const event = store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "foo",
},
});
let emailId: string | Error | undefined;
projector.once(
"user:created",
async () => {
return { id: "fake-email-id" };
},
{
async onError({ error }) {
emailId = error as Error;
},
async onSuccess({ data }) {
emailId = data.id;
},
},
);
await store.pushEvent(event);
assertObjectMatch(await store.events.getByStream(stream).then((rows: any) => rows[0]), event);
assertEquals(emailId, "fake-email-id");
});
it("should handle failed projection", async () => {
const { store, projector } = await getEventStore();
const stream = makeId();
const event = store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "foo",
},
});
let emailId: string | undefined;
projector.once(
"user:created",
async () => {
fakeEmail();
},
{
async onError({ error }) {
emailId = (error as Error).message;
},
async onSuccess() {},
},
);
await store.pushEvent(event);
assertObjectMatch(await store.events.getByStream(stream).then((rows: any) => rows[0]), event);
assertEquals(emailId, "Failed to send email!");
});
});
function fakeEmail() {
throw new Error("Failed to send email!");
}


@@ -0,0 +1,34 @@
import { assertEquals } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import type { EventStoreFactory } from "../../mocks/events.ts";
import { describe } from "../../utilities/describe.ts";
export default describe<EventStoreFactory>("relations", (getEventStore) => {
it("should create a new relation", async () => {
const { store } = await getEventStore();
const key = "sample";
const stream = nanoid();
await store.relations.insert(key, stream);
assertEquals(await store.relations.getByKey(key), [stream]);
});
it("should ignore duplicate relations", async () => {
const { store } = await getEventStore();
const key = "sample";
const stream = nanoid();
await store.relations.insertMany([
{ key, stream },
{ key, stream },
]);
await store.relations.insert(key, stream);
assertEquals(await store.relations.getByKey(key), [stream]);
});
});


@@ -0,0 +1,42 @@
import { assertEquals, assertObjectMatch } from "@std/assert";
import { it } from "@std/testing/bdd";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".pushAggregate", (getEventStore) => {
it("should successfully commit pending aggregate events to the event store", async () => {
const { store } = await getEventStore();
const user = store
.aggregate("user")
.create({ given: "Jane", family: "Doe" }, "jane.doe@fixture.none")
.setGivenName("John")
.setEmail("john.doe@fixture.none", "admin");
assertEquals(user.toPending().length, 3);
await store.pushAggregate(user);
assertEquals(user.toPending().length, 0);
const records = await store.getEventsByStreams([user.id]);
assertEquals(records.length, 3);
assertObjectMatch(records[0], { stream: user.id, data: { name: { given: "Jane", family: "Doe" }, email: "jane.doe@fixture.none" } });
assertObjectMatch(records[1], { stream: user.id, data: "John" });
assertObjectMatch(records[2], { stream: user.id, data: "john.doe@fixture.none", meta: { auditor: "admin" } });
const state = await store.reduce({ name: "user", stream: user.id, reducer: userReducer });
assertObjectMatch(state!, {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
});
});
});


@@ -0,0 +1,62 @@
import { assertEquals, assertObjectMatch } from "@std/assert";
import { it } from "@std/testing/bdd";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".pushManyAggregates", (getEventStore) => {
it("should successfully commit pending aggregates events to the event store", async () => {
const { store } = await getEventStore();
const userA = store
.aggregate("user")
.create({ given: "Jane", family: "Doe" }, "jane.doe@fixture.none")
.setGivenName("John")
.setEmail("john.doe@fixture.none", "admin");
const userB = store
.aggregate("user")
.create({ given: "Peter", family: "Doe" }, "peter.doe@fixture.none")
.setGivenName("Barry")
.setEmail("barry.doe@fixture.none", "admin");
assertEquals(userA.toPending().length, 3);
assertEquals(userB.toPending().length, 3);
await store.pushManyAggregates([userA, userB]);
assertEquals(userA.toPending().length, 0);
assertEquals(userB.toPending().length, 0);
const records = await store.getEventsByStreams([userA.id, userB.id]);
assertEquals(records.length, 6);
assertObjectMatch(records[0], { stream: userA.id, data: { name: { given: "Jane", family: "Doe" }, email: "jane.doe@fixture.none" } });
assertObjectMatch(records[1], { stream: userA.id, data: "John" });
assertObjectMatch(records[2], { stream: userA.id, data: "john.doe@fixture.none", meta: { auditor: "admin" } });
assertObjectMatch(records[3], { stream: userB.id, data: { name: { given: "Peter", family: "Doe" }, email: "peter.doe@fixture.none" } });
assertObjectMatch(records[4], { stream: userB.id, data: "Barry" });
assertObjectMatch(records[5], { stream: userB.id, data: "barry.doe@fixture.none", meta: { auditor: "admin" } });
const stateA = await store.reduce({ name: "user", stream: userA.id, reducer: userReducer });
const stateB = await store.reduce({ name: "user", stream: userB.id, reducer: userReducer });
assertObjectMatch(stateA!, {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
});
assertObjectMatch(stateB!, {
name: {
given: "Barry",
family: "Doe",
},
email: "barry.doe@fixture.none",
});
});
});


@@ -0,0 +1,103 @@
import { assertEquals } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import type { EventStoreFactory } from "../mocks/events.ts";
import { userReducer } from "../mocks/user-reducer.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".reduce", (getEventStore) => {
it("should return reduced state", async () => {
const { store } = await getEventStore();
const stream = nanoid();
await store.pushEvent(
store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "super",
},
}),
);
await store.pushEvent(
store.event({
stream,
type: "user:email-set",
data: "jane.doe@fixture.none",
meta: {
auditor: "super",
},
}),
);
const state = await store.reduce({ name: "user", stream, reducer: userReducer });
assertEquals(state, {
name: { given: "John", family: "Doe" },
email: "jane.doe@fixture.none",
active: true,
posts: { list: [], count: 0 },
});
});
it("should return snapshot if it exists and no new events were found", async () => {
const { store } = await getEventStore();
const stream = nanoid();
await store.pushEvent(
store.event({
stream,
type: "user:created",
data: {
name: {
given: "John",
family: "Doe",
},
email: "john.doe@fixture.none",
},
meta: {
auditor: "super",
},
}),
);
await store.pushEvent(
store.event({
stream,
type: "user:email-set",
data: "jane.doe@fixture.none",
meta: {
auditor: "super",
},
}),
);
await store.createSnapshot({ name: "user", stream, reducer: userReducer });
const state = await store.reduce({ name: "user", stream, reducer: userReducer });
assertEquals(state, {
name: { given: "John", family: "Doe" },
email: "jane.doe@fixture.none",
active: true,
posts: { list: [], count: 0 },
});
});
it("should return undefined if stream does not have events", async () => {
const stream = nanoid();
const { store } = await getEventStore();
const state = await store.reduce({ name: "user", stream, reducer: userReducer });
assertEquals(state, undefined);
});
});


@@ -0,0 +1,94 @@
import { assertObjectMatch } from "@std/assert";
import { it } from "@std/testing/bdd";
import { nanoid } from "nanoid";
import type { EventStoreFactory } from "../mocks/events.ts";
import { describe } from "../utilities/describe.ts";
export default describe<EventStoreFactory>(".replayEvents", (getEventStore) => {
it("should replay events", async () => {
const { store, projector } = await getEventStore();
const stream = nanoid();
const record: Record<string, any> = {};
projector.on("user:created", async ({ stream, data: { name, email } }) => {
record[stream] = {
name,
email,
};
});
projector.on("user:name:given-set", async ({ stream, data }) => {
record[stream].name.given = data;
});
projector.on("user:email-set", async ({ stream, data }) => {
record[stream].email = data;
});
await store.pushManyEvents([
store.event({
stream,
type: "user:created",
data: {
name: {
given: "Jane",
family: "Doe",
},
email: "jane.doe@fixture.none",
},
meta: {
auditor: "admin",
},
}),
store.event({
stream,
type: "user:name:given-set",
data: "John",
meta: {
auditor: "admin",
},
}),
store.event({
stream,
type: "user:email-set",
data: "john@doe.com",
meta: {
auditor: "admin",
},
}),
]);
assertObjectMatch(record, {
[stream]: {
name: {
given: "John",
family: "Doe",
},
email: "john@doe.com",
},
});
delete record[stream];
const promises = [];
const records = await store.getEventsByStreams([stream]);
for (const record of records) {
promises.push(projector.push(record, { hydrated: true, outdated: false }));
}
await Promise.all(promises);
assertObjectMatch(record, {
[stream]: {
name: {
given: "John",
family: "Doe",
},
email: "john@doe.com",
},
});
});
});


@@ -0,0 +1,17 @@
import { describe as desc } from "@std/testing/bdd";
import { EventFactory } from "../../../libraries/event-factory.ts";
import { EventStore, type EventStoreHooks } from "../../../libraries/event-store.ts";
import { Projector } from "../../../libraries/projector.ts";
export function describe<TEventFactory extends EventFactory>(
name: string,
runner: (getEventStore: EventStoreFn<TEventFactory>) => void,
): (getEventStore: EventStoreFn<TEventFactory>) => void {
return (getEventStore: EventStoreFn<TEventFactory>) => desc(name, () => runner(getEventStore));
}
type EventStoreFn<TEventFactory extends EventFactory> = (options?: { hooks?: EventStoreHooks<TEventFactory> }) => Promise<{
store: EventStore<TEventFactory, any, any>;
projector: Projector<TEventFactory>;
}>;
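// Illustrative only: how the wrapper above is consumed. Suite modules in this
// commit export the result of `describe(...)` as their default export, and an
// adapter test entry point invokes it with its own store factory. `runSuite`
// is a hypothetical helper for this sketch, not part of the library.
function runSuite<TEventFactory extends EventFactory>(
  suite: (getEventStore: EventStoreFn<TEventFactory>) => void,
  factory: EventStoreFn<TEventFactory>,
): void {
  suite(factory);
}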

196
types/adapter.ts Normal file

@@ -0,0 +1,196 @@
import type { EventRecord } from "../libraries/event.ts";
import type { EventReadOptions } from "./query.ts";
export type EventStoreAdapter<TDatabase> = {
readonly db: TDatabase;
readonly providers: {
readonly events: EventsProvider;
readonly relations: RelationsProvider;
readonly snapshots: SnapshotsProvider;
};
};
/*
|--------------------------------------------------------------------------------
| Events Provider
|--------------------------------------------------------------------------------
*/
export type EventsProvider = {
/**
* Insert a new event record to the events table.
*
* @param record - Event record to insert.
*/
insert(record: EventRecord): Promise<void>;
/**
* Insert many new event records to the events table.
*
* @param records - Event records to insert.
* @param batchSize - Batch size for the insert loop. Default: 1_000
*/
insertMany(records: EventRecord[], batchSize?: number): Promise<void>;
/**
* Retrieve all the events in the events table. Optionally a cursor and direction
* can be provided to reduce the list of events returned.
*
* @param options - Find options.
*/
get(options?: EventReadOptions): Promise<EventRecord[]>;
/**
* Get events within the given stream.
*
* @param stream - Stream to fetch events for.
* @param options - Read options for modifying the result.
*/
getByStream(stream: string, options?: EventReadOptions): Promise<EventRecord[]>;
/**
   * Get events within the given list of streams.
   *
   * @param streams - Streams to get events for.
* @param options - Read options for modifying the result.
*/
getByStreams(streams: string[], options?: EventReadOptions): Promise<EventRecord[]>;
/**
* Get a single event by its id.
*
* @param id - Event id.
*/
getById(id: string): Promise<EventRecord | undefined>;
/**
* Check if the given event is outdated in relation to the local event data.
*/
checkOutdated({ stream, type, created }: EventRecord): Promise<boolean>;
};
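// Illustrative usage sketch, not part of the contract: how an adapter's
// EventsProvider might be driven. The provider instance and record are assumed
// inputs here; only the method signatures above are taken as given.
async function exampleEventsRead(provider: EventsProvider, record: EventRecord): Promise<EventRecord[]> {
  await provider.insert(record);
  // Read the stream back, newest first, capped at ten rows of a single type.
  return provider.getByStream(record.stream, {
    direction: "desc",
    limit: 10,
    filter: { types: ["user:created"] },
  });
}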
/*
|--------------------------------------------------------------------------------
| Relations
|--------------------------------------------------------------------------------
*/
export type RelationsProvider = {
/**
* Handle incoming relation operations.
*
* @param relations - List of relation operations to execute.
*/
handle(relations: Relation[]): Promise<void>;
/**
* Add stream to the relations table.
*
* @param key - Relational key to add stream to.
* @param stream - Stream to add to the key.
*/
insert(key: string, stream: string): Promise<void>;
/**
   * Insert multiple stream relations into the relations table.
*
* @param relations - Relations to insert.
* @param batchSize - Batch size for the insert loop. Default: 1_000
*/
insertMany(relations: RelationPayload[], batchSize?: number): Promise<void>;
/**
* Get a list of event streams registered under the given relational key.
*
* @param key - Relational key to get event streams for.
*/
getByKey(key: string): Promise<string[]>;
/**
* Get a list of event streams registered under the given relational keys.
*
* @param keys - Relational keys to get event streams for.
*/
getByKeys(keys: string[]): Promise<string[]>;
/**
* Removes a stream from the relational table.
*
* @param key - Relational key to remove stream from.
* @param stream - Stream to remove from relation.
*/
remove(key: string, stream: string): Promise<void>;
/**
* Removes multiple relational entries.
*
* @param relations - Relations to remove stream from.
   * @param batchSize - Batch size for the removal loop. Default: 1_000
*/
removeMany(relations: RelationPayload[], batchSize?: number): Promise<void>;
/**
* Remove all relations bound to the given relational keys.
*
* @param keys - Relational keys to remove from the relational table.
*/
removeByKeys(keys: string[]): Promise<void>;
/**
* Remove all relations bound to the given streams.
*
* @param streams - Streams to remove from the relational table.
*/
removeByStreams(streams: string[]): Promise<void>;
};
export type RelationHandler<TRecord extends EventRecord> = (record: TRecord) => Promise<Omit<Relation, "stream">[]>;
export type RelationPayload = Omit<Relation, "op">;
export type Relation = {
op: "insert" | "remove";
key: string;
stream: string;
};
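// Illustrative sketch: maintaining a relational index such as "user:<id>:posts"
// through the RelationsProvider contract above. The provider instance is an
// assumed input; the key format mirrors the projector tests in this commit.
async function examplePostIndex(provider: RelationsProvider, userId: string, postStream: string): Promise<string[]> {
  await provider.insert(`user:${userId}:posts`, postStream);
  const streams = await provider.getByKey(`user:${userId}:posts`);
  // Drop the relation again, e.g. when the post is removed.
  await provider.remove(`user:${userId}:posts`, postStream);
  return streams;
}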
/*
|--------------------------------------------------------------------------------
| Snapshots
|--------------------------------------------------------------------------------
*/
export type SnapshotsProvider = {
/**
* Add snapshot state under given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream the snapshot is attached to.
* @param cursor - Cursor timestamp for the last event used in the snapshot.
* @param state - State of the reduced events.
*/
insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void>;
/**
* Get snapshot state by stream.
*
   * @param name - Name of the reducer the state was created with.
* @param stream - Stream the state was reduced for.
*/
getByStream(name: string, stream: string): Promise<Snapshot | undefined>;
/**
* Removes a snapshot for the given reducer stream.
*
* @param name - Name of the reducer the snapshot is attached to.
* @param stream - Stream to remove from snapshots.
*/
remove(name: string, stream: string): Promise<void>;
};
export type Snapshot = {
stream: string;
name: string;
cursor: string;
state: Record<string, unknown>;
};
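// Illustrative sketch of the snapshot lifecycle against the SnapshotsProvider
// contract above. The provider, stream, and cursor values are assumed inputs;
// in practice the event store derives the cursor from the last event folded
// into the snapshot state.
async function exampleSnapshotLifecycle(provider: SnapshotsProvider, stream: string, cursor: string): Promise<void> {
  await provider.insert("user", stream, cursor, { active: true });
  const snapshot = await provider.getByStream("user", stream);
  if (snapshot !== undefined) {
    console.log(snapshot.cursor, snapshot.state);
  }
  await provider.remove("user", stream);
}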

32
types/common.ts Normal file

@@ -0,0 +1,32 @@
/**
* Represents an empty object.
*/
export type Empty = Record<string, never>;
/**
* Represent an unknown object.
*/
export type Unknown = Record<string, unknown>;
/**
* Represents a subscription that exposes a way to unsubscribe.
*
* @example
*
* ```ts
* function subscribe(): Subscription {
* const interval = setInterval(() => console.log("foo"), 1000);
* return {
* unsubscribe() {
* clearInterval(interval);
* }
* }
* }
* ```
*/
export type Subscription = {
/**
* Gracefully terminate a decoupled subscriber.
*/
unsubscribe: () => void;
};

50
types/projector.ts Normal file

@@ -0,0 +1,50 @@
import type { EventRecord } from "../libraries/event.ts";
export type BatchedProjectorListeners<TRecord extends EventRecord = EventRecord> = Record<string, Set<BatchedProjectorListenerFn<TRecord>> | undefined>;
export type ProjectorListeners<TRecord extends EventRecord = EventRecord> = Record<string, Set<ProjectorListenerFn<TRecord>> | undefined>;
export type ProjectorMessage<TRecord extends EventRecord = EventRecord> = {
record: TRecord;
status: ProjectionStatus;
};
export type BatchedProjectorListenerFn<TRecord extends EventRecord = EventRecord> = (records: TRecord[]) => void;
export type ProjectorListenerFn<TRecord extends EventRecord = EventRecord> = (record: TRecord, status: ProjectionStatus) => void;
export type ProjectionHandler<TRecord extends EventRecord = EventRecord, TSuccessData extends Record<string, any> | void = void> = TSuccessData extends void
? (record: TRecord) => Promise<void>
: (record: TRecord) => Promise<TSuccessData>;
export type BatchedProjectionHandler<TRecord extends EventRecord = EventRecord> = (records: TRecord[]) => Promise<void>;
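// Illustrative only: how the conditional ProjectionHandler type resolves. With
// a success payload the handler must resolve data (consumed by an `onSuccess`
// hook, as in the projector tests in this commit); with the void default it
// resolves nothing. The email id below is a stand-in value.
const exampleSendEmail: ProjectionHandler<EventRecord, { id: string }> = async (record) => {
  return { id: `email-${record.stream}` };
};
const exampleLogOnly: ProjectionHandler<EventRecord> = async (_record) => {
  // side effect only, no success data
};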
export type ProjectionStatus = {
/**
   * Whether the event has previously run through projections, in which case
   * we do not want to re-run one-time projections that should not execute
   * during event replays.
*/
hydrated: boolean;
/**
   * Whether the incoming event is older than another event of the same type
   * in the same stream.
*/
outdated: boolean;
};
export type ProjectionFilter = {
/**
   * Hydrated events represent events that are not being seen for the first
   * time within their lifetime across all distributed instances.
*/
allowHydratedEvents: boolean;
/**
   * Outdated events represent events whose type has already been seen at a
   * later occurrence. E.g. if an incoming event is older than the latest
   * local event of the same type, it is considered outdated.
*/
allowOutdatedEvents: boolean;
};
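// Illustrative ProjectionFilter values. A single-shot style filter rejects both
// hydrated and outdated events (matching the `.once` semantics exercised in the
// projector tests), while a permissive filter accepts everything. The variable
// names are labels for this sketch only.
const exampleSingleShotFilter: ProjectionFilter = { allowHydratedEvents: false, allowOutdatedEvents: false };
const examplePermissiveFilter: ProjectionFilter = { allowHydratedEvents: true, allowOutdatedEvents: true };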

72
types/query.ts Normal file

@@ -0,0 +1,72 @@
import type { Reducer } from "../libraries/reducer.ts";
export type ReduceQuery<TReducer extends Reducer> =
| ({
/**
     * Name of the reducer. Must be a unique identifier as it's used by the
     * snapshotter to store and manage state snapshots for event streams.
*/
name: string;
/**
* Stream to fetch events from and pass to the reducer method.
*/
stream: string;
/**
* Reducer method to pass resolved events to.
*/
reducer: TReducer;
relation?: never;
} & EventReadFilter)
| ({
/**
     * Name of the reducer. Must be a unique identifier as it's used by the
     * snapshotter to store and manage state snapshots for event streams.
*/
name: string;
/**
     * Relational key that resolves the streams to fetch events from and pass
     * to the reducer method.
*/
relation: string;
/**
* Reducer method to pass resolved events to.
*/
reducer: TReducer;
stream?: never;
} & EventReadFilter);
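// Illustrative only: the two mutually exclusive ReduceQuery shapes. A query
// targets either a single stream or a relational key, never both; the `never`
// markers above reject mixed queries. The reducer is a declared stand-in.
declare const exampleReducer: Reducer;
const exampleByStream: ReduceQuery<typeof exampleReducer> = { name: "user", stream: "stream-id", reducer: exampleReducer };
const exampleByRelation: ReduceQuery<typeof exampleReducer> = { name: "user", relation: "user:abc:posts", reducer: exampleReducer };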
export type EventReadOptions = EventReadFilter & {
/**
   * Fetches events from the given cursor, which uses the local event
   * record's `recorded` timestamp.
*/
cursor?: string;
/**
* Fetch events in ascending or descending order. Default: "asc"
*/
direction?: 1 | -1 | "asc" | "desc";
/**
* Limit the number of events returned.
*/
limit?: number;
};
export type EventReadFilter = {
/**
* Filter options for how events are pulled from the store.
*/
filter?: {
/**
* Only include events in the given types.
*/
types?: string[];
};
};
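// An illustrative EventReadOptions value: "user:created" events recorded after
// a given cursor, oldest first, capped at 100 rows. The cursor string is a
// placeholder; real cursors come from previously returned records.
const exampleReadOptions: EventReadOptions = {
  cursor: "2025-01-01T00:00:00.000Z",
  direction: "asc",
  limit: 100,
  filter: { types: ["user:created"] },
};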

5
types/utilities.ts Normal file

@@ -0,0 +1,5 @@
import type { Empty } from "./common.ts";
export type ExcludeEmptyFields<T> = {
[K in keyof T as T[K] extends Empty ? never : K]: T[K];
};
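// A small demonstration: keys typed as Empty are stripped by the mapped type
// above, so the alias below reduces to `{ data: string }`. The event shape is
// illustrative only.
type ExampleDeactivated = ExcludeEmptyFields<{ data: string; meta: Empty }>;
const exampleDeactivated: ExampleDeactivated = { data: "user:deactivated" };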