feat: version 2 beta

This commit is contained in:
2025-04-25 22:39:47 +00:00
commit 1e58359905
75 changed files with 6899 additions and 0 deletions

View File

@@ -0,0 +1,36 @@
import type { IndexedDatabase } from "@valkyr/db";
import { Event } from "../../libraries/event.ts";
import { EventStoreAdapter } from "../../types/adapter.ts";
import { Adapter, Collections, EventStoreDB, getEventStoreDatabase } from "./database.ts";
import { BrowserEventsProvider } from "./providers/events.ts";
import { BrowserRelationsProvider } from "./providers/relations.ts";
import { BrowserSnapshotsProvider } from "./providers/snapshots.ts";
/**
* A browser-based event store adapter that integrates database-specific providers.
*
* The `BrowserAdapter` enables event sourcing in a browser environment by utilizing
* IndexedDB for storage. It provides implementations for event storage, relations,
* and snapshots, allowing seamless integration with the shared event store interface.
*
* @template TEvent - The type of events managed by the event store.
*/
export class BrowserAdapter<const TEvent extends Event> implements EventStoreAdapter<EventStoreDB> {
  readonly #storage: IndexedDatabase<Collections>;

  providers: EventStoreAdapter<TEvent>["providers"];

  /**
   * Instantiate the adapter on top of the requested storage backend and wire
   * up one provider per event store collection.
   *
   * @param database - Storage backend to use ("indexeddb" or "memorydb").
   * @param name - Name of the underlying database. Defaults to "valkyr:event-store".
   */
  constructor(database: Adapter, name = "valkyr:event-store") {
    const storage = getEventStoreDatabase(name, database) as IndexedDatabase<Collections>;
    this.#storage = storage;
    this.providers = {
      events: new BrowserEventsProvider(storage.collection("events")),
      relations: new BrowserRelationsProvider(storage.collection("relations")),
      snapshots: new BrowserSnapshotsProvider(storage.collection("snapshots")),
    };
  }

  /**
   * Underlying database instance backing the providers.
   */
  get db(): IndexedDatabase<Collections> {
    return this.#storage;
  }
}

View File

@@ -0,0 +1,73 @@
import { IndexedDatabase, MemoryDatabase } from "@valkyr/db";
import { EventRecord } from "../../libraries/event.ts";
export function getEventStoreDatabase(name: string, adapter: Adapter): EventStoreDB {
switch (adapter) {
case "indexeddb": {
return new IndexedDatabase<Collections>({
name,
version: 1,
registrars: [
{
name: "events",
indexes: [
["stream", { unique: false }],
["created", { unique: false }],
["recorded", { unique: false }],
],
},
{
name: "relations",
indexes: [
["key", { unique: false }],
["stream", { unique: false }],
],
},
{
name: "snapshots",
indexes: [
["name", { unique: false }],
["stream", { unique: false }],
["cursor", { unique: false }],
],
},
],
});
}
case "memorydb": {
return new MemoryDatabase<Collections>({
name,
registrars: [{ name: "events" }, { name: "relations" }, { name: "snapshots" }],
});
}
}
}
/*
|--------------------------------------------------------------------------------
| Types
|--------------------------------------------------------------------------------
*/
// Database instance variants the browser event store can run on top of.
export type EventStoreDB = IndexedDatabase<Collections> | MemoryDatabase<Collections>;

// Storage backend identifiers accepted by `getEventStoreDatabase`.
export type Adapter = "indexeddb" | "memorydb";

// Collections registered on the event store database.
export type Collections = {
  events: EventRecord;
  relations: Relation;
  snapshots: Snapshot;
};

// A single key -> stream relation entry (one document per pair).
export type Relation = {
  key: string;
  stream: string;
};

// Reduced reducer state stored for a (name, stream) pair; `cursor` is the
// timestamp of the last event folded into `state`.
export type Snapshot = {
  name: string;
  stream: string;
  cursor: string;
  state: Record<string, unknown>;
};

View File

@@ -0,0 +1,122 @@
import type { Collection } from "@valkyr/db";
import { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
export class BrowserEventsProvider implements EventsProvider {
  constructor(readonly events: Collection<EventRecord>) {}

  /**
   * Insert a new event record to the events table.
   *
   * @param record - Event record to insert.
   */
  async insert(record: EventRecord): Promise<void> {
    await this.events.insertOne(record);
  }

  /**
   * Insert many new event records to the events table.
   *
   * @param records - Event records to insert.
   * @param batchSize - Batch size for the insert loop. Defaults to 1,000.
   */
  async insertMany(records: EventRecord[], batchSize: number = 1_000): Promise<void> {
    for (let i = 0; i < records.length; i += batchSize) {
      await this.events.insertMany(records.slice(i, i + batchSize));
    }
  }

  /**
   * Retrieve all the events in the events table. Optionally a cursor and direction
   * can be provided to reduce the list of events returned.
   *
   * @param options - Find options.
   */
  async get(options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#find(this.#query(options));
  }

  /**
   * Get events within the given stream.
   *
   * @param stream - Stream to fetch events for.
   * @param options - Read options for modifying the result.
   */
  async getByStream(stream: string, options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#find(this.#query(options, { stream }));
  }

  /**
   * Get events within given list of streams.
   *
   * @param streams - Streams to get events for.
   * @param options - Read options for modifying the result.
   */
  async getByStreams(streams: string[], options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#find(this.#query(options, { stream: { $in: streams } }));
  }

  /**
   * Get a single event by its id.
   *
   * @param id - Event id.
   */
  async getById(id: string): Promise<EventRecord | undefined> {
    return (await this.events.findById(id)) satisfies EventRecord | undefined;
  }

  /**
   * Check if the given event is outdated in relation to the local event data.
   * An event is considered outdated when a newer event of the same type exists
   * on the same stream.
   */
  async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
    const count = await this.events.count({
      stream,
      type,
      created: {
        $gt: created,
      },
    } as any);
    return count > 0;
  }

  /**
   * Apply shared read options (type filter, cursor constraint) on top of the
   * given base query. Previously every getter duplicated this logic and
   * `getByStreams` defaulted the direction inconsistently (`direction ?? "asc"`).
   */
  #query({ filter, cursor, direction }: EventReadOptions, query: any = {}): any {
    if (filter?.types !== undefined) {
      withTypes(query, filter.types);
    }
    if (cursor !== undefined) {
      withCursor(query, cursor, direction);
    }
    return query;
  }

  // Execute the query, always sorted by creation time ascending.
  async #find(query: any): Promise<EventRecord[]> {
    return (await this.events.find(query, { sort: { created: 1 } })) as EventRecord[];
  }
}
/*
|--------------------------------------------------------------------------------
| Query Builders
|--------------------------------------------------------------------------------
*/
/**
 * Constrains the query to events whose `type` is one of the given types.
 *
 * @param filter - Query object to mutate.
 * @param types - Event types to match against.
 */
function withTypes(filter: any, types: string[]): void {
  Object.assign(filter, { type: { $in: types } });
}
/**
 * Constrains the query to events created after (ascending, the default) or
 * before (descending) the given cursor value.
 *
 * @param filter - Query object to mutate.
 * @param cursor - Cursor value compared against the `created` field.
 * @param direction - "desc" or -1 selects events before the cursor; any other
 *                    value (including omitted) selects events after it.
 */
function withCursor(filter: any, cursor: string, direction?: 1 | -1 | "asc" | "desc"): void {
  // The previous implementation re-checked `cursor !== undefined` even though
  // the parameter is non-optional and every caller already guards — dead branch removed.
  filter.created = {
    [direction === "desc" || direction === -1 ? "$lt" : "$gt"]: cursor,
  };
}

View File

@@ -0,0 +1,109 @@
import type { Collection } from "@valkyr/db";
import type { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
export class BrowserRelationsProvider implements RelationsProvider {
  constructor(readonly relations: Collection<Relation>) {}

  /**
   * Handle incoming relation operations.
   *
   * @param relations - List of relation operations to execute.
   */
  async handle(relations: Relation[]): Promise<void> {
    await Promise.all([
      this.insertMany(relations.filter((relation) => relation.op === "insert")),
      this.removeMany(relations.filter((relation) => relation.op === "remove")),
    ]);
  }

  /**
   * Add stream to the relations table.
   *
   * @param key - Relational key to add stream to.
   * @param stream - Stream to add to the key.
   */
  async insert(key: string, stream: string): Promise<void> {
    await this.relations.insertOne({ key, stream });
  }

  /**
   * Add stream to many relational keys onto the relations table.
   *
   * @param relations - Relations to insert.
   * @param batchSize - Maximum number of documents per insert. Defaults to 1,000.
   */
  async insertMany(relations: { key: string; stream: string }[], batchSize: number = 1_000): Promise<void> {
    for (let i = 0; i < relations.length; i += batchSize) {
      await this.relations.insertMany(relations.slice(i, i + batchSize).map(({ key, stream }) => ({ key, stream })));
    }
  }

  /**
   * Get a list of event streams registered under the given relational key.
   *
   * @param key - Relational key to get event streams for.
   */
  async getByKey(key: string): Promise<string[]> {
    return this.relations.find({ key }).then((relations) => relations.map(({ stream }) => stream));
  }

  /**
   * Get a deduplicated list of event streams registered under the given
   * relational keys.
   *
   * @param keys - Relational keys to get event streams for.
   */
  async getByKeys(keys: string[]): Promise<string[]> {
    return this.relations.find({ key: { $in: keys } }).then((relations) => {
      const streamIds = new Set<string>();
      for (const relation of relations) {
        streamIds.add(relation.stream);
      }
      return Array.from(streamIds);
    });
  }

  /**
   * Removes a stream from the relational table.
   *
   * @param key - Relational key to remove stream from.
   * @param stream - Stream to remove from relation.
   */
  async remove(key: string, stream: string): Promise<void> {
    await this.relations.remove({ key, stream });
  }

  /**
   * Removes multiple relational entries.
   *
   * Each batch is awaited before the next one starts so that `batchSize`
   * actually bounds concurrency. The previous implementation sliced the input
   * into batches but issued every removal at once, making the batching a no-op.
   *
   * @param relations - Relations to remove streams from.
   * @param batchSize - Maximum number of concurrent removals. Defaults to 1,000.
   */
  async removeMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
    for (let i = 0; i < relations.length; i += batchSize) {
      await Promise.all(relations.slice(i, i + batchSize).map(({ key, stream }) => this.remove(key, stream)));
    }
  }

  /**
   * Remove all relations bound to the given relational keys.
   *
   * @param keys - Relational keys to remove from the relational table.
   */
  async removeByKeys(keys: string[]): Promise<void> {
    await this.relations.remove({ key: { $in: keys } });
  }

  /**
   * Remove all relations bound to the given streams.
   *
   * @param streams - Streams to remove from the relational table.
   */
  async removeByStreams(streams: string[]): Promise<void> {
    await this.relations.remove({ stream: { $in: streams } });
  }
}

View File

@@ -0,0 +1,39 @@
import type { Collection } from "@valkyr/db";
import type { Snapshot, SnapshotsProvider } from "../../../types/adapter.ts";
export class BrowserSnapshotsProvider implements SnapshotsProvider {
  constructor(readonly snapshots: Collection<Snapshot>) {}

  /**
   * Persist a snapshot of reduced state for the given reducer and stream.
   *
   * NOTE(review): this always inserts a new document — repeated snapshots for
   * the same (name, stream) pair may accumulate unless callers remove stale
   * ones first; confirm against `@valkyr/db` insert semantics.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream the snapshot is attached to.
   * @param cursor - Cursor timestamp for the last event used in the snapshot.
   * @param state - State of the reduced events.
   */
  async insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void> {
    const snapshot: Snapshot = { name, stream, cursor, state };
    await this.snapshots.insertOne(snapshot);
  }

  /**
   * Get snapshot state by stream.
   *
   * @param name - Name of the reducer which the state was created.
   * @param stream - Stream the state was reduced for.
   */
  async getByStream(name: string, stream: string): Promise<Snapshot | undefined> {
    return this.snapshots.findOne({ name, stream });
  }

  /**
   * Removes a snapshot for the given reducer stream.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream to remove from snapshots.
   */
  async remove(name: string, stream: string): Promise<void> {
    await this.snapshots.remove({ name, stream });
  }
}

91
adapters/mongo/adapter.ts Normal file
View File

@@ -0,0 +1,91 @@
import type { MongoConnectionUrl } from "@valkyr/testcontainers/mongodb";
import { Db, MongoClient } from "mongodb";
import { EventStoreAdapter } from "../../types/adapter.ts";
import { registrars } from "./collections/mod.ts";
import { MongoEventsProvider } from "./providers/events.ts";
import { MongoRelationsProvider } from "./providers/relations.ts";
import { MongoSnapshotsProvider } from "./providers/snapshots.ts";
import { DatabaseAccessor } from "./types.ts";
import { getCollectionsSet } from "./utilities.ts";
/**
* A server-based event store adapter that integrates database-specific providers.
*
* The `MongoAdapter` enables event sourcing in a back end environment by utilizing
* MongoDB for storage. It provides implementations for event storage, relations,
* and snapshots, allowing seamless integration with the shared event store interface.
*
* @template TEvent - The type of events managed by the event store.
*/
export class MongoAdapter implements EventStoreAdapter<DatabaseAccessor> {
  readonly providers: {
    readonly events: MongoEventsProvider;
    readonly relations: MongoRelationsProvider;
    readonly snapshots: MongoSnapshotsProvider;
  };

  readonly #accessor: DatabaseAccessor;

  /**
   * Instantiate the adapter and wire one provider per event store collection,
   * all sharing a single lazily resolved database accessor.
   *
   * @param connection - Connection url, client instance, or lazy client factory.
   * @param db - Name of the database to operate against.
   */
  constructor(connection: MongoConnection, db: string) {
    const accessor = getDatabaseAccessor(connection, db);
    this.#accessor = accessor;
    this.providers = {
      events: new MongoEventsProvider(accessor),
      relations: new MongoRelationsProvider(accessor),
      snapshots: new MongoSnapshotsProvider(accessor),
    };
  }

  /**
   * Accessor exposing the lazily resolved mongo client and database handle.
   */
  get db(): DatabaseAccessor {
    return this.#accessor;
  }
}
/**
* Takes a mongo database and registers the event store collections and
* indexes defined internally.
*
* @param db - Mongo database to register event store collections against.
* @param logger - Logger method to print internal logs.
*/
export async function register(db: Db, logger?: (...args: any[]) => any) {
  const existing = await getCollectionsSet(db);
  // Only create collections that are not already present on the database.
  const pending = registrars.filter(({ name }) => !existing.has(name));
  for (const { name, indexes } of pending) {
    await db.createCollection(name);
    for (const [indexSpec, options] of indexes) {
      await db.collection(name).createIndex(indexSpec, options);
      logger?.("Mongo Event Store > Collection '%s' is indexed [%O] with options %O", name, indexSpec, options ?? {});
    }
    logger?.("Mongo Event Store > Collection '%s' is registered", name);
  }
}
/**
 * Build a lazy accessor around the given connection.
 *
 * Both the client and the database handle are resolved on first access and
 * then cached. The previous implementation cached the `Db` but built a brand
 * new `MongoClient` on every `client` access when given a connection url —
 * leaking unclosed clients — and invoked the factory on every access.
 * (Assumes a factory is meant to yield one shared client, matching the
 * already-cached `Db` — TODO confirm with the factory contract.)
 *
 * @param connection - Connection url, client instance, or lazy client factory.
 * @param database - Name of the database handle to expose.
 */
function getDatabaseAccessor(connection: MongoConnection, database: string): DatabaseAccessor {
  let client: MongoClient | undefined;
  let db: Db | undefined;
  return {
    get db(): Db {
      if (db === undefined) {
        db = this.client.db(database);
      }
      return db;
    },
    get client(): MongoClient {
      if (client === undefined) {
        if (connection instanceof MongoClient) {
          client = connection;
        } else if (typeof connection === "string") {
          client = new MongoClient(connection);
        } else {
          client = connection();
        }
      }
      return client;
    },
  };
}
/**
* Connection which the adapter supports, this can be a `url`, a `client` instance
* or a lazy method that provided `client` instance on demand.
*/
export type MongoConnection = MongoConnectionUrl | MongoClient | (() => MongoClient);

View File

@@ -0,0 +1,49 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
// Registrar consumed by `register` to create the "events" collection and its
// secondary indexes (one single-field index per queried column).
export const registrar: CollectionRegistrar = {
  name: "events",
  indexes: [
    [
      {
        stream: 1,
      },
    ],
    [
      {
        type: 1,
      },
    ],
    [
      {
        recorded: 1,
      },
    ],
    [
      {
        created: 1,
      },
    ],
  ],
};

// Runtime validator applied to event documents read back from mongo. `data`
// and `meta` are intentionally unvalidated (`z.any()`) since their shape is
// specific to each event type.
export const schema = z.object({
  id: z.string(),
  stream: z.string(),
  type: z.string(),
  data: z.any(),
  meta: z.any(),
  recorded: z.string(),
  created: z.string(),
});

// Compile-time shape of a stored event document.
export type EventSchema = {
  id: string;
  stream: string;
  type: string;
  data: Record<string, any> | null;
  meta: Record<string, any> | null;
  recorded: string;
  created: string;
};

View File

@@ -0,0 +1,10 @@
import { CollectionRegistrar } from "../types.ts";
import { registrar as events } from "./events.ts";
import { registrar as relations } from "./relations.ts";
import { registrar as snapshots } from "./snapshots.ts";
// Aggregated collection registrars, in the order `register` creates them.
export const registrars: CollectionRegistrar[] = [
  events,
  relations,
  snapshots,
];

View File

@@ -0,0 +1,38 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
// Registrar consumed by `register` to create the "relations" collection.
//
// NOTE(review): the indexes reference a scalar `stream` field and enforce a
// unique (key, stream) pair, but documents in this collection store a
// `streams` array (see `schema` below), so the indexed `stream` field does not
// exist on the documents. Confirm whether these indexes should target
// `streams` (multikey) instead.
export const registrar: CollectionRegistrar = {
  name: "relations",
  indexes: [
    [
      {
        key: 1,
      },
    ],
    [
      {
        stream: 1,
      },
    ],
    [
      {
        key: 1,
        stream: 1,
      },
      {
        unique: true,
      },
    ],
  ],
};

// Runtime validator for relation documents: one document per key holding the
// set of related stream ids.
export const schema = z.object({
  key: z.string(),
  streams: z.string().array(),
});

// Compile-time shape of a stored relation document.
export type RelationSchema = {
  key: string;
  streams: string[];
};

View File

@@ -0,0 +1,30 @@
import z from "zod";
import type { CollectionRegistrar } from "../types.ts";
// Registrar consumed by `register` to create the "snapshots" collection with a
// compound index covering the (name, stream, cursor) lookup path.
export const registrar: CollectionRegistrar = {
  name: "snapshots",
  indexes: [
    [
      {
        name: 1,
        stream: 1,
        cursor: 1,
      },
    ],
  ],
};

// Runtime validator for snapshot documents.
export const schema = z.object({
  name: z.string(),
  stream: z.string(),
  cursor: z.string(),
  state: z.record(z.string(), z.any()),
});

// Compile-time shape of a stored snapshot document.
export type SnapshotSchema = {
  name: string;
  stream: string;
  cursor: string;
  state: Record<string, any>;
};

View File

@@ -0,0 +1,131 @@
import type { Collection, FindCursor } from "mongodb";
import { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
import { type EventSchema, schema } from "../collections/events.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";
export class MongoEventsProvider implements EventsProvider {
  readonly #accessor: DatabaseAccessor;

  constructor(accessor: DatabaseAccessor) {
    this.#accessor = accessor;
  }

  /**
   * Lazily resolved handle to the "events" collection.
   */
  get collection(): Collection<EventSchema> {
    return this.#accessor.db.collection<EventSchema>("events");
  }

  /**
   * Insert a new event record to the events table.
   *
   * @param record - Event record to insert.
   */
  async insert(record: EventRecord): Promise<void> {
    await this.collection.insertOne(record, { forceServerObjectId: true });
  }

  /**
   * Insert many new event records to the events table.
   *
   * @param records - Event records to insert.
   */
  async insertMany(records: EventRecord[]): Promise<void> {
    await this.collection.insertMany(records, { forceServerObjectId: true });
  }

  /**
   * Retrieve all the events in the events table. Optionally a cursor and direction
   * can be provided to reduce the list of events returned.
   *
   * @param options - Find options.
   */
  async get(options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#toRecords(this.#limit(this.collection.find(this.#conditions(options)).sort({ created: 1 }), options.limit));
  }

  /**
   * Get events within the given stream.
   *
   * @param stream - Stream to fetch events for.
   * @param options - Read options for modifying the result.
   */
  async getByStream(stream: string, options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#toRecords(this.#limit(this.collection.find({ stream, ...this.#conditions(options) }).sort({ created: 1 }), options.limit));
  }

  /**
   * Get events within given list of streams.
   *
   * @param streams - Streams to get events for.
   * @param options - Read options for modifying the result.
   */
  async getByStreams(streams: string[], options: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.#toRecords(
      this.#limit(this.collection.find({ stream: { $in: streams }, ...this.#conditions(options) }).sort({ created: 1 }), options.limit),
    );
  }

  /**
   * Get a single event by its id.
   *
   * @param id - Event id.
   */
  async getById(id: string): Promise<EventRecord | undefined> {
    return (await this.collection.findOne({ id }).then(toParsedRecord(schema))) as EventRecord | undefined;
  }

  /**
   * Check if the given event is outdated in relation to the local event data.
   * An event is considered outdated when a newer event of the same type exists
   * on the same stream.
   *
   * @param event - Event record to check for outdated state for.
   */
  async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
    const count = await this.collection.countDocuments({
      stream,
      type,
      created: {
        $gt: created,
      },
    });
    return count > 0;
  }

  /*
   |--------------------------------------------------------------------------------
   | Utilities
   |--------------------------------------------------------------------------------
   */

  /**
   * Build the query conditions shared by all event getters.
   *
   * Cursor constraints are merged into the find filter here. The previous
   * implementation applied them via `FindCursor.filter()`, which *replaces*
   * the cursor's filter wholesale — silently dropping the stream and type
   * constraints whenever a cursor was supplied.
   */
  #conditions({ filter, cursor, direction }: EventReadOptions): { type?: { $in: string[] }; created?: { $gt: string } | { $lt: string } } {
    const conditions: { type?: { $in: string[] }; created?: { $gt: string } | { $lt: string } } = {};
    if (filter?.types !== undefined) {
      conditions.type = { $in: filter.types };
    }
    if (cursor !== undefined) {
      conditions.created = direction === "desc" || direction === -1 ? { $lt: cursor } : { $gt: cursor };
    }
    return conditions;
  }

  // Apply the optional result limit to the cursor.
  #limit(fc: FindCursor, limit?: number): FindCursor {
    return limit === undefined ? fc : fc.limit(limit);
  }

  // Materialize the cursor and validate each document against the event schema.
  async #toRecords(fc: FindCursor): Promise<EventRecord[]> {
    return (await fc.toArray().then(toParsedRecords(schema))) as EventRecord[];
  }
}

View File

@@ -0,0 +1,168 @@
import type { Collection } from "mongodb";
import { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
import { type RelationSchema, schema } from "../collections/relations.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";
export class MongoRelationsProvider implements RelationsProvider {
readonly #accessor: DatabaseAccessor;
constructor(accessor: DatabaseAccessor) {
this.#accessor = accessor;
}
get collection(): Collection<RelationSchema> {
return this.#accessor.db.collection<RelationSchema>("relations");
}
/**
* Handle incoming relation operations.
*
* @param relations - List of relation operations to execute.
*/
async handle(relations: Relation[]): Promise<void> {
await Promise.all([
this.insertMany(relations.filter((relation) => relation.op === "insert")),
this.removeMany(relations.filter((relation) => relation.op === "remove")),
]);
}
/**
* Add stream to the relations table.
*
* @param key - Relational key to add stream to.
* @param stream - Stream to add to the key.
*/
async insert(key: string, stream: string): Promise<void> {
await this.collection.updateOne({ key }, { $addToSet: { streams: stream } }, { upsert: true });
}
/**
* Add stream to many relational keys onto the relations table.
*
* @param relations - Relations to insert.
*/
async insertMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
const reduced = relations.reduce((map, { key, stream }) => {
if (map.has(key) === false) {
map.set(key, new Set<string>());
}
map.get(key)!.add(stream);
return map;
}, new Map<string, Set<string>>());
const bulkOps = [];
for (const [key, streams] of reduced) {
bulkOps.push({
updateOne: {
filter: { key },
update: { $addToSet: { streams: { $each: Array.from(streams) } } },
upsert: true,
},
});
}
for (let i = 0; i < bulkOps.length; i += batchSize) {
await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
}
}
/**
* Get a list of event streams registered under the given relational key.
*
* @param key - Relational key to get event streams for.
*/
async getByKey(key: string): Promise<string[]> {
const relations = await this.collection.findOne({ key }).then(toParsedRecord(schema));
if (relations === undefined) {
return [];
}
return relations.streams;
}
/**
* Get a list of event streams registered under the given relational keys.
*
* @param keys - Relational keys to get event streams for.
*/
async getByKeys(keys: string[]): Promise<string[]> {
const streams = new Set<string>();
const documents = await this.collection
.find({ key: { $in: keys } })
.toArray()
.then(toParsedRecords(schema));
documents.forEach((document) => {
for (const stream of document.streams) {
streams.add(stream);
}
});
return Array.from(streams);
}
/**
* Removes a stream from the relational table.
*
* @param key - Relational key to remove stream from.
* @param stream - Stream to remove from relation.
*/
async remove(key: string, stream: string): Promise<void> {
await this.collection.updateOne({ key }, { $pull: { streams: stream } });
}
/**
* Removes multiple relational entries.
*
* @param relations - Relations to remove stream from.
*/
async removeMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
const reduced = relations.reduce((map, { key, stream }) => {
if (!map.has(key)) {
map.set(key, new Set());
}
map.get(key)!.add(stream);
return map;
}, new Map<string, Set<string>>());
const bulkOps = [];
for (const [key, streams] of reduced) {
bulkOps.push({
updateOne: {
filter: { key },
update: { $pull: { streams: { $in: Array.from(streams) } } },
},
});
}
for (let i = 0; i < bulkOps.length; i += batchSize) {
await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
}
}
/**
* Remove all relations bound to the given relational keys.
*
* @param keys - Relational keys to remove from the relational table.
*/
async removeByKeys(keys: string[]): Promise<void> {
await this.collection.deleteMany({ key: { $in: keys } });
}
/**
* Remove all relations bound to the given streams.
*
* @param streams - Streams to remove from the relational table.
*/
async removeByStreams(streams: string[]): Promise<void> {
await this.collection.bulkWrite(
streams.map((stream) => ({
updateOne: {
filter: { streams: stream },
update: { $pull: { streams: stream } },
},
})),
);
}
}

View File

@@ -0,0 +1,50 @@
import type { Collection } from "mongodb";
import { SnapshotsProvider } from "../../../types/adapter.ts";
import { schema, type SnapshotSchema } from "../collections/snapshots.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord } from "../utilities.ts";
export class MongoSnapshotsProvider implements SnapshotsProvider {
  readonly #accessor: DatabaseAccessor;

  constructor(accessor: DatabaseAccessor) {
    this.#accessor = accessor;
  }

  /**
   * Lazily resolved handle to the "snapshots" collection.
   */
  get collection(): Collection<SnapshotSchema> {
    return this.#accessor.db.collection<SnapshotSchema>("snapshots");
  }

  /**
   * Add snapshot state under given reducer stream.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream the snapshot is attached to.
   * @param cursor - Cursor timestamp for the last event used in the snapshot.
   * @param state - State of the reduced events.
   */
  async insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void> {
    // Upsert on (name, stream): a reducer keeps one snapshot per stream, which
    // is also the pair `getByStream`/`remove` query by. The previous filter
    // matched on `name` alone, so snapshotting one stream clobbered snapshots
    // of every other stream under the same reducer.
    await this.collection.updateOne({ name, stream }, { $set: { cursor, state } }, { upsert: true });
  }

  /**
   * Get snapshot state by stream.
   *
   * @param name - Name of the reducer which the state was created.
   * @param stream - Stream the state was reduced for.
   */
  async getByStream(name: string, stream: string): Promise<SnapshotSchema | undefined> {
    return this.collection.findOne({ name, stream }).then(toParsedRecord(schema));
  }

  /**
   * Removes a snapshot for the given reducer stream.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream to remove from snapshots.
   */
  async remove(name: string, stream: string): Promise<void> {
    await this.collection.deleteOne({ name, stream });
  }
}

11
adapters/mongo/types.ts Normal file
View File

@@ -0,0 +1,11 @@
import type { CreateIndexesOptions, Db, IndexSpecification, MongoClient } from "mongodb";
/**
 * Describes a collection to create on registration, together with the index
 * specifications (and optional index options) to apply to it.
 */
export type CollectionRegistrar = {
  name: string;
  indexes: [IndexSpecification, CreateIndexesOptions?][];
};

/**
 * Lazily resolved handles to the mongo client and database shared by the
 * adapter's providers.
 */
export type DatabaseAccessor = {
  db: Db;
  client: MongoClient;
};

View File

@@ -0,0 +1,43 @@
import type { Db, WithId } from "mongodb";
import type { z, ZodObject } from "zod";
/**
* Take a list of records and run it through the given zod parser. This
* ensures that all the documents in resulting list adheres to the
* expected schematics before being returned. Any deviation in the list
* will result in an internal error being thrown.
*
* @param parser - Zod parser to run the documents through.
*/
/**
 * Take a list of records and run it through the given zod parser. This
 * ensures that all the documents in the resulting list adhere to the
 * expected schematics before being returned. Any deviation in the list
 * will result in an internal error being thrown.
 *
 * The parser is invoked through an explicit closure rather than returning the
 * detached `parse` method reference, which would break if the method is not
 * bound to its schema instance.
 *
 * @param parser - Zod parser to run the documents through.
 */
export function toParsedRecords<TSchema extends ZodObject>(parser: TSchema): (documents: WithId<object>[]) => z.infer<TSchema>[] {
  const listParser = parser.array();
  return (documents) => listParser.parse(documents);
}
/**
* Take a single nullable document value and run it through the given zod
* parser. This ensures that the data adheres to the expected schematics
* before being returned. Any deviation in the expected response will result
* in an internal error being thrown.
*
* @param parser - Zod parser to run the document through.
*/
/**
 * Take a single nullable document value and run it through the given zod
 * parser. This ensures that the data adheres to the expected schematics
 * before being returned; a `null` document maps to `undefined`. Any deviation
 * in the expected response will result in an internal error being thrown.
 *
 * @param parser - Zod parser to run the document through.
 */
export function toParsedRecord<TSchema extends ZodObject>(parser: TSchema): (document: WithId<object> | null) => z.infer<TSchema> | undefined {
  return (document) => (document === null ? undefined : parser.parse(document));
}
/**
* Get a Set of collections that exists on a given mongo database instance.
*
* @param db - Mongo database to fetch collection list for.
*/
/**
 * Get a Set of collection names that exist on a given mongo database instance.
 *
 * @param db - Mongo database to fetch collection list for.
 * @returns Set of existing collection names.
 */
export async function getCollectionsSet(db: Db): Promise<Set<string>> {
  // Explicit return type added — exported functions should not rely on inference.
  const collections = await db.listCollections().toArray();
  return new Set(collections.map((collection) => collection.name));
}

102
adapters/postgres/README.md Normal file
View File

@@ -0,0 +1,102 @@
<p align="center">
<img src="https://user-images.githubusercontent.com/1998130/229430454-ca0f2811-d874-4314-b13d-c558de8eec7e.svg" />
</p>
# Postgres Adapter
The following instructions aim to guide you through setting up @valkyr/event-store with a postgres database.
## Event Store
Once we have defined our configs and printed our events we create a new postgres event store instance.
```ts
import { makePostgresEventStore } from "@valkyr/event-store/postgres";
import postgres from "postgres";
import { type Event, type EventRecord, events, validators } from "./generated/events.ts";
export const eventStore = makePostgresEventStore<Event>({
connection: () => postgres("postgres://${string}:${string}@${string}:${number}/${string}"), // lazy loaded connection
schema: "event_store",
events,
validators,
hooks: {
async onError(error) {
// when the event store throws unhandled errors they will end up in
// this location that can be further logged in the systems own logger
// if onError hook is not provided all unhandled errors are logged
// through the `console.error` method.
},
},
});
const projector = new Projector<EventRecord>();
eventStore.onEventsInserted(async (records, { batch }) => {
// trigger event side effects here such as sending the records through
// an event messaging system or other projection patterns
// ### Projector
// The following is an example when registering event handlers with the
// projectors instance provided by this library.
if (batch !== undefined) {
await projector.pushMany(batch, records);
} else {
for (const record of records) {
await projector.push(record, { hydrated: false, outdated: false });
}
}
});
```
## Migrations
We do not manage migrations in your local solutions so what we provide is a sample SQL script for optimal query setup. The following example assumes all event tables goes into a `event_store` schema. If you are adding these tables to a different schema or into the public default postgres space you will need to modify this sample accordingly.
```sql
CREATE SCHEMA "event_store";
-- Event Table
CREATE TABLE IF NOT EXISTS "event_store"."events" (
"id" varchar PRIMARY KEY NOT NULL,
"stream" varchar NOT NULL,
"type" varchar NOT NULL,
"data" jsonb NOT NULL,
"meta" jsonb NOT NULL,
"recorded" varchar NOT NULL,
"created" varchar NOT NULL
);
CREATE INDEX IF NOT EXISTS "events_stream_index" ON "event_store"."events" USING btree ("stream");
CREATE INDEX IF NOT EXISTS "events_type_index" ON "event_store"."events" USING btree ("type");
CREATE INDEX IF NOT EXISTS "events_recorded_index" ON "event_store"."events" USING btree ("recorded");
CREATE INDEX IF NOT EXISTS "events_created_index" ON "event_store"."events" USING btree ("created");
-- Relations Table
CREATE TABLE IF NOT EXISTS "event_store"."relations" (
"id" serial PRIMARY KEY NOT NULL,
"key" varchar NOT NULL,
"stream" varchar NOT NULL,
UNIQUE ("key", "stream")
);
CREATE INDEX IF NOT EXISTS "relations_key_index" ON "event_store"."relations" USING btree ("key");
CREATE INDEX IF NOT EXISTS "relations_stream_index" ON "event_store"."relations" USING btree ("stream");
-- Snapshots Table
CREATE TABLE IF NOT EXISTS "event_store"."snapshots" (
"id" serial PRIMARY KEY NOT NULL,
"name" varchar NOT NULL,
"stream" varchar NOT NULL,
"cursor" varchar NOT NULL,
"state" jsonb NOT NULL,
UNIQUE ("name", "stream")
);
CREATE INDEX IF NOT EXISTS "snapshots_name_stream_cursor_index" ON "event_store"."snapshots" USING btree ("name","stream","cursor");
```

View File

@@ -0,0 +1,45 @@
import { EventStoreAdapter } from "../../types/adapter.ts";
import { PostgresConnection } from "./connection.ts";
import { PostgresDatabase } from "./database.ts";
import { PostgresEventsProvider } from "./providers/event.ts";
import { PostgresRelationsProvider } from "./providers/relations.ts";
import { PostgresSnapshotsProvider } from "./providers/snapshot.ts";
/**
 * A server-based event store adapter that integrates database-specific providers.
 *
 * The `PostgresAdapter` enables event sourcing in a back end environment by utilizing
 * PostgreSql for storage. It provides implementations for event storage, relations,
 * and snapshots, allowing seamless integration with the shared event store interface.
 */
export class PostgresAdapter implements EventStoreAdapter<PostgresDatabase> {
  readonly providers: {
    readonly events: PostgresEventsProvider;
    readonly relations: PostgresRelationsProvider;
    readonly snapshots: PostgresSnapshotsProvider;
  };

  readonly #database: PostgresDatabase;

  /**
   * @param connection - Connection details handed to the lazy database wrapper.
   * @param options    - Optional settings, e.g. the schema the tables live in.
   */
  constructor(
    readonly connection: PostgresConnection,
    readonly options: Options = {},
  ) {
    const database = new PostgresDatabase(connection);
    const schema = options.schema;
    this.#database = database;
    this.providers = {
      events: new PostgresEventsProvider(database, schema),
      relations: new PostgresRelationsProvider(database, schema),
      snapshots: new PostgresSnapshotsProvider(database, schema),
    };
  }

  /** Database handle shared by all three providers. */
  get db(): PostgresDatabase {
    return this.#database;
  }
}

type Options = {
  schema?: string;
};

View File

@@ -0,0 +1,7 @@
import type { Options, Sql } from "postgres";
/**
 * Accepted connection inputs for the Postgres adapter: a `[url, options?]`
 * tuple, an `[options]` tuple, an already constructed `Sql` instance, or a
 * factory that produces one on demand.
 */
export type PostgresConnection = [PostgresConnectionUrl, Options<any>?] | [Options<any>] | Sql | PostgresConnectionFactory;

// Template-literal type enforcing the `postgres://user:pass@host:port/db` shape.
type PostgresConnectionUrl = `postgres://${string}:${string}@${string}:${number}/${string}`;

// Lazily produces a `Sql` instance when the adapter first needs one.
type PostgresConnectionFactory = () => Sql;

View File

@@ -0,0 +1,36 @@
import postgres, { type Sql } from "postgres";
import { PostgresConnection } from "./connection.ts";
/**
 * Thin wrapper around the `postgres` driver that defers resolving the
 * connection until `.sql` is first accessed, then memoizes the instance.
 */
export class PostgresDatabase {
  readonly #connection: PostgresConnection;

  #sql?: Sql;

  constructor(connection: PostgresConnection) {
    this.#connection = connection;
  }

  /**
   * Resolve (and cache) the underlying `Sql` instance from whichever
   * connection form was supplied: a `[url, options?]` / `[options]` tuple,
   * an existing `Sql` instance, or a factory function.
   */
  get sql(): Sql {
    if (this.#sql !== undefined) {
      return this.#sql;
    }
    const connection = this.#connection;
    if (Array.isArray(connection)) {
      const [urlOrOptions, option] = connection;
      this.#sql = typeof urlOrOptions === "string" ? postgres(urlOrOptions, option) : postgres(urlOrOptions);
    } else if ("options" in connection) {
      // A `Sql` instance is itself callable, so `typeof` cannot tell it apart
      // from a factory; presumably every instance carries an `options`
      // property, which is what this check relies on.
      this.#sql = connection;
    } else {
      this.#sql = connection();
    }
    return this.#sql;
  }
}

/** Minimal shape required by code that only needs query access. */
export type DatabaseAccessor = {
  sql: Sql;
};

View File

@@ -0,0 +1,175 @@
import type { Helper } from "postgres";
import type { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
import type { PostgresDatabase } from "../database.ts";
type PGEventRecord = Omit<EventRecord, "data" | "meta"> & { data: string; meta: string };

/**
 * Postgres-backed implementation of the events provider.
 *
 * Serializes `data` and `meta` payloads to JSON strings on write and parses
 * them back on read. All queries run through the shared `PostgresDatabase`
 * accessor and target the configured schema (default `public`).
 */
export class PostgresEventsProvider implements EventsProvider {
  constructor(
    readonly db: PostgresDatabase,
    readonly schema?: string,
  ) {}

  /** Fully qualified events table identifier for the configured schema. */
  get table(): Helper<string, []> {
    if (this.schema !== undefined) {
      return this.db.sql(`${this.schema}.events`);
    }
    return this.db.sql("public.events");
  }

  /**
   * Insert a new event record to the events table.
   *
   * @param record - Event record to insert.
   */
  async insert(record: EventRecord): Promise<void> {
    await this.db.sql`INSERT INTO ${this.table} ${this.db.sql(this.#toDriver(record))}`.catch((error) => {
      throw new Error(`EventStore > 'events.insert' failed with postgres error: ${error.message}`);
    });
  }

  /**
   * Insert many new event records to the events table.
   *
   * Runs inside a single transaction; either every batch commits or none do.
   *
   * @param records   - Event records to insert.
   * @param batchSize - Batch size for the insert loop.
   */
  async insertMany(records: EventRecord[], batchSize: number = 1_000): Promise<void> {
    await this.db.sql
      .begin(async (sql) => {
        for (let i = 0; i < records.length; i += batchSize) {
          await sql`INSERT INTO ${this.table} ${this.db.sql(records.slice(i, i + batchSize).map(this.#toDriver))}`;
        }
      })
      .catch((error) => {
        throw new Error(`EventStore > 'events.insertMany' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Retrieve all the events in the events table. Optionally a cursor and direction
   * can be provided to reduce the list of events returned.
   *
   * @param options - Find options.
   */
  async get(options: EventReadOptions): Promise<EventRecord[]> {
    if (options !== undefined) {
      const { filter, cursor, direction, limit } = options;
      // 'WHERE 1 = 1' anchors the clause so the optional 'AND ...' fragments
      // below stay syntactically valid when none, some, or all filters are
      // provided. (Without it, a cursor-only query would render 'WHERE AND ...'
      // and an empty options object would render a bare 'WHERE ORDER BY'.)
      return this.db.sql<PGEventRecord[]>`
        SELECT * FROM ${this.table}
        WHERE 1 = 1
          ${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
          ${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
        ORDER BY created ASC
        ${limit ? this.#withLimit(limit) : this.db.sql``}
      `.then(this.#fromDriver);
    }
    return this.db.sql<PGEventRecord[]>`SELECT * FROM ${this.table} ORDER BY created ASC`.then(this.#fromDriver);
  }

  /**
   * Get events within the given stream.
   *
   * @param stream  - Stream to fetch events for.
   * @param options - Read options for modifying the result.
   */
  async getByStream(stream: string, { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.db.sql<PGEventRecord[]>`
      SELECT * FROM ${this.table}
      WHERE
        stream = ${stream}
        ${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
        ${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
      ORDER BY created ASC
      ${limit ? this.#withLimit(limit) : this.db.sql``}
    `.then(this.#fromDriver);
  }

  /**
   * Get events within given list of streams.
   *
   * @param streams - Streams to get events for.
   * @param options - Read options for modifying the result.
   */
  async getByStreams(streams: string[], { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
    return this.db.sql<PGEventRecord[]>`
      SELECT * FROM ${this.table}
      WHERE
        stream IN ${this.db.sql(streams)}
        ${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
        ${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
      ORDER BY created ASC
      ${limit ? this.#withLimit(limit) : this.db.sql``}
    `.then(this.#fromDriver);
  }

  /**
   * Get a single event by its id.
   *
   * @param id - Event id.
   */
  async getById(id: string): Promise<EventRecord | undefined> {
    return this.db.sql<PGEventRecord[]>`SELECT * FROM ${this.table} WHERE id = ${id}`.then(this.#fromDriver).then(([record]) => record);
  }

  /**
   * Check if the given event is outdated in relation to the local event data.
   *
   * An event is outdated when a newer event of the same type already exists
   * on the same stream.
   */
  async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
    // Read the 'count' column off the first row; coercing the row object
    // itself with Number() would yield NaN and the check would always fail.
    const count = await this.db.sql`
      SELECT COUNT(*) AS count
      FROM ${this.table}
      WHERE
        stream = ${stream}
        AND type = ${type}
        AND created > ${created}
    `.then((result: any) => Number(result[0].count));
    return count > 0;
  }

  /*
   |--------------------------------------------------------------------------------
   | Utilities
   |--------------------------------------------------------------------------------
   */

  // 'AND type IN (...)' fragment for type filtering.
  #withTypes(types: string[]) {
    return this.db.sql`AND type IN ${this.db.sql(types)}`;
  }

  // Cursor fragment: descending reads fetch events strictly older than the
  // cursor, ascending (the default) strictly newer.
  #withCursor(cursor: string, direction?: 1 | -1 | "asc" | "desc") {
    if (direction === "desc" || direction === -1) {
      return this.db.sql`AND created < ${cursor}`;
    }
    return this.db.sql`AND created > ${cursor}`;
  }

  // 'LIMIT n' fragment.
  #withLimit(limit: number) {
    return this.db.sql`LIMIT ${limit}`;
  }

  /*
   |--------------------------------------------------------------------------------
   | Parsers
   |--------------------------------------------------------------------------------
   */

  // Parse JSON columns back into objects; tolerates drivers that already
  // return parsed jsonb values.
  #fromDriver(records: PGEventRecord[]): EventRecord[] {
    return records.map((record) => {
      record.data = typeof record.data === "string" ? JSON.parse(record.data) : record.data;
      record.meta = typeof record.meta === "string" ? JSON.parse(record.meta) : record.meta;
      return record as unknown as EventRecord;
    });
  }

  // Serialize payload columns to JSON strings for insertion.
  #toDriver(record: EventRecord): PGEventRecord {
    return {
      ...record,
      data: JSON.stringify(record.data),
      meta: JSON.stringify(record.meta),
    };
  }
}

View File

@@ -0,0 +1,140 @@
import type { Helper } from "postgres";
import type { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
import type { PostgresDatabase } from "../database.ts";
/**
 * Postgres-backed implementation of the relations provider.
 *
 * Maintains the many-to-many mapping between relational keys and event
 * streams in the relations table of the configured schema (default `public`).
 */
export class PostgresRelationsProvider implements RelationsProvider {
  constructor(
    readonly db: PostgresDatabase,
    readonly schema?: string,
  ) {}

  /** Fully qualified relations table identifier for the configured schema. */
  get table(): Helper<string, []> {
    if (this.schema !== undefined) {
      return this.db.sql(`${this.schema}.relations`);
    }
    return this.db.sql("public.relations");
  }

  /**
   * Handle incoming relation operations.
   *
   * @param relations - List of relation operations to execute.
   */
  async handle(relations: Relation[]): Promise<void> {
    await Promise.all([
      this.insertMany(relations.filter((relation) => relation.op === "insert")),
      this.removeMany(relations.filter((relation) => relation.op === "remove")),
    ]);
  }

  /**
   * Add stream to the relations table.
   *
   * @param key    - Relational key to add stream to.
   * @param stream - Stream to add to the key.
   */
  async insert(key: string, stream: string): Promise<void> {
    await this.db.sql`INSERT INTO ${this.table} (key, stream) VALUES (${key}, ${stream}) ON CONFLICT DO NOTHING`.catch((error) => {
      throw new Error(`EventStore > 'relations.insert' failed with postgres error: ${error.message}`);
    });
  }

  /**
   * Add stream to many relational keys onto the relations table.
   *
   * @param relations - Relations to insert.
   * @param batchSize - Batch size for the insert loop.
   */
  async insertMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
    await this.db.sql
      .begin(async (sql) => {
        for (let i = 0; i < relations.length; i += batchSize) {
          const values = relations.slice(i, i + batchSize).map(({ key, stream }) => [key, stream]);
          await sql`INSERT INTO ${this.table} (key, stream) VALUES ${sql(values)} ON CONFLICT DO NOTHING`;
        }
      })
      .catch((error) => {
        throw new Error(`EventStore > 'relations.insertMany' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Get a list of event streams registered under the given relational key.
   *
   * @param key - Relational key to get event streams for.
   */
  async getByKey(key: string): Promise<string[]> {
    return this.db.sql`SELECT stream FROM ${this.table} WHERE key = ${key}`
      .then((rows) => rows.map(({ stream }) => stream))
      .catch((error) => {
        throw new Error(`EventStore > 'relations.getByKey' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Get a list of event streams registered under the given relational keys.
   *
   * @param keys - Relational keys to get event streams for.
   */
  async getByKeys(keys: string[]): Promise<string[]> {
    return this.db.sql`SELECT DISTINCT stream FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`
      .then((rows) => rows.map(({ stream }) => stream))
      .catch((error) => {
        throw new Error(`EventStore > 'relations.getByKeys' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Removes a stream from the relational table.
   *
   * @param key    - Relational key to remove stream from.
   * @param stream - Stream to remove from relation.
   */
  async remove(key: string, stream: string): Promise<void> {
    await this.db.sql`DELETE FROM ${this.table} WHERE key = ${key} AND stream = ${stream}`.catch((error) => {
      throw new Error(`EventStore > 'relations.remove' failed with postgres error: ${error.message}`);
    });
  }

  /**
   * Removes multiple relational entries.
   *
   * @param relations - Relations to remove stream from.
   * @param batchSize - Batch size for the delete loop.
   */
  async removeMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
    await this.db.sql
      .begin(async (sql) => {
        for (let i = 0; i < relations.length; i += batchSize) {
          // Build the OR'ed pair conditions as parameterized fragments instead
          // of interpolating raw values into an unsafe string, which was both
          // an SQL injection vector and broken for values containing quotes.
          const condition = relations
            .slice(i, i + batchSize)
            .map(({ key, stream }) => sql`(key = ${key} AND stream = ${stream})`)
            .reduce((lhs, rhs) => sql`${lhs} OR ${rhs}`);
          await sql`DELETE FROM ${this.table} WHERE ${condition}`;
        }
      })
      .catch((error) => {
        throw new Error(`EventStore > 'relations.removeMany' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Remove all relations bound to the given relational keys.
   *
   * @param keys - Relational keys to remove from the relational table.
   */
  async removeByKeys(keys: string[]): Promise<void> {
    await this.db.sql`DELETE FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`.catch((error) => {
      throw new Error(`EventStore > 'relations.removeByKeys' failed with postgres error: ${error.message}`);
    });
  }

  /**
   * Remove all relations bound to the given streams.
   *
   * @param streams - Streams to remove from the relational table.
   */
  async removeByStreams(streams: string[]): Promise<void> {
    await this.db.sql`DELETE FROM ${this.table} WHERE stream IN ${this.db.sql(streams)}`.catch((error) => {
      throw new Error(`EventStore > 'relations.removeByStreams' failed with postgres error: ${error.message}`);
    });
  }
}

View File

@@ -0,0 +1,82 @@
import type { Helper } from "postgres";
import type { Snapshot, SnapshotsProvider } from "../../../types/adapter.ts";
import type { PostgresDatabase } from "../database.ts";
type PGSnapshot = Omit<Snapshot, "state"> & { state: string };

/**
 * Snapshot storage backed by a Postgres table.
 *
 * The reduced `state` object is stored as a JSON string and rehydrated on
 * read. Snapshots are looked up by reducer `name` together with the event
 * `stream` they were produced from.
 */
export class PostgresSnapshotsProvider implements SnapshotsProvider {
  constructor(
    readonly db: PostgresDatabase,
    readonly schema?: string,
  ) {}

  /** Fully qualified snapshots table, falling back to the `public` schema. */
  get table(): Helper<string, []> {
    return this.schema !== undefined ? this.db.sql(`${this.schema}.snapshots`) : this.db.sql("public.snapshots");
  }

  /**
   * Add snapshot state under given reducer stream.
   *
   * NOTE(review): the sample DDL declares UNIQUE ("name", "stream"); a second
   * insert for the same pair will raise a conflict error here — confirm
   * whether an upsert (ON CONFLICT DO UPDATE) is the intended behavior.
   *
   * @param name   - Name of the reducer the snapshot is attached to.
   * @param stream - Stream the snapshot is attached to.
   * @param cursor - Cursor timestamp for the last event used in the snapshot.
   * @param state  - State of the reduced events.
   */
  async insert(name: string, stream: string, cursor: string, state: any): Promise<void> {
    const row = this.#serialize({ name, stream, cursor, state });
    await this.db.sql`
      INSERT INTO ${this.table} ${this.db.sql(row)}`.catch((error) => {
      throw new Error(`EventStore > 'snapshots.insert' failed with postgres error: ${error.message}`);
    });
  }

  /**
   * Get snapshot state by stream.
   *
   * @param name   - Name of the reducer which the state was created.
   * @param stream - Stream the state was reduced for.
   */
  async getByStream(name: string, stream: string): Promise<Snapshot | undefined> {
    return this.db.sql<PGSnapshot[]>`SELECT * FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`
      .then(this.#deserialize)
      .then((snapshots) => snapshots[0])
      .catch((error) => {
        throw new Error(`EventStore > 'snapshots.getByStream' failed with postgres error: ${error.message}`);
      });
  }

  /**
   * Removes a snapshot for the given reducer stream.
   *
   * @param name   - Name of the reducer the snapshot is attached to.
   * @param stream - Stream to remove from snapshots.
   */
  async remove(name: string, stream: string): Promise<void> {
    await this.db.sql`DELETE FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`.catch((error) => {
      throw new Error(`EventStore > 'snapshots.remove' failed with postgres error: ${error.message}`);
    });
  }

  /*
   |--------------------------------------------------------------------------------
   | Parsers
   |--------------------------------------------------------------------------------
   */

  // Parse the JSON 'state' column back into an object; tolerates drivers that
  // already return parsed jsonb values.
  #deserialize(snapshots: PGSnapshot[]): Snapshot[] {
    return snapshots.map((snapshot) => {
      snapshot.state = typeof snapshot.state === "string" ? JSON.parse(snapshot.state) : snapshot.state;
      return snapshot as unknown as Snapshot;
    });
  }

  // Serialize 'state' to a JSON string for insertion.
  #serialize(snapshot: Snapshot): object {
    return {
      ...snapshot,
      state: JSON.stringify(snapshot.state),
    };
  }
}