feat: version 2 beta
This commit is contained in:
102
adapters/postgres/README.md
Normal file
102
adapters/postgres/README.md
Normal file
@@ -0,0 +1,102 @@
|
||||
<p align="center">
|
||||
<img src="https://user-images.githubusercontent.com/1998130/229430454-ca0f2811-d874-4314-b13d-c558de8eec7e.svg" />
|
||||
</p>
|
||||
|
||||
# Postgres Adapter
|
||||
|
||||
The following instructions aim to guide you through setting up @valkyr/event-store with a Postgres database.
|
||||
|
||||
## Event Store
|
||||
|
||||
Once we have defined our configs and generated our events we create a new postgres event store instance.
|
||||
|
||||
```ts
|
||||
import { makePostgresEventStore } from "@valkyr/event-store/postgres";
|
||||
import postgres from "postgres";
|
||||
|
||||
import { type Event, type EventRecord, events, validators } from "./generated/events.ts";
|
||||
|
||||
export const eventStore = makePostgresEventStore<Event>({
|
||||
  connection: () => postgres("postgres://user:password@localhost:5432/my_database"), // lazy loaded connection
|
||||
schema: "event_store",
|
||||
events,
|
||||
validators,
|
||||
hooks: {
|
||||
async onError(error) {
|
||||
// when the event store throws unhandled errors they will end up in
|
||||
// this location that can be further logged in the systems own logger
|
||||
// if onError hook is not provided all unhandled errors are logged
|
||||
// through the `console.error` method.
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const projector = new Projector<EventRecord>();
|
||||
|
||||
eventStore.onEventsInserted(async (records, { batch }) => {
|
||||
// trigger event side effects here such as sending the records through
|
||||
// an event messaging system or other projection patterns
|
||||
|
||||
// ### Projector
|
||||
// The following is an example when registering event handlers with the
|
||||
// projectors instance provided by this library.
|
||||
|
||||
if (batch !== undefined) {
|
||||
await projector.pushMany(batch, records);
|
||||
} else {
|
||||
for (const record of records) {
|
||||
await projector.push(record, { hydrated: false, outdated: false });
|
||||
}
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
## Migrations
|
||||
|
||||
We do not manage migrations for your local solution, so what we provide is a sample SQL script for an optimal query setup. The following example assumes all event tables go into an `event_store` schema. If you are adding these tables to a different schema or into the public default postgres space you will need to modify this sample accordingly.
|
||||
|
||||
```sql
|
||||
CREATE SCHEMA "event_store";
|
||||
|
||||
-- Event Table
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "event_store"."events" (
|
||||
"id" varchar PRIMARY KEY NOT NULL,
|
||||
"stream" varchar NOT NULL,
|
||||
"type" varchar NOT NULL,
|
||||
"data" jsonb NOT NULL,
|
||||
"meta" jsonb NOT NULL,
|
||||
"recorded" varchar NOT NULL,
|
||||
"created" varchar NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "events_stream_index" ON "event_store"."events" USING btree ("stream");
|
||||
CREATE INDEX IF NOT EXISTS "events_type_index" ON "event_store"."events" USING btree ("type");
|
||||
CREATE INDEX IF NOT EXISTS "events_recorded_index" ON "event_store"."events" USING btree ("recorded");
|
||||
CREATE INDEX IF NOT EXISTS "events_created_index" ON "event_store"."events" USING btree ("created");
|
||||
|
||||
-- Relations Table
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "event_store"."relations" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"key" varchar NOT NULL,
|
||||
"stream" varchar NOT NULL,
|
||||
UNIQUE ("key", "stream")
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "relations_key_index" ON "event_store"."relations" USING btree ("key");
|
||||
CREATE INDEX IF NOT EXISTS "relations_stream_index" ON "event_store"."relations" USING btree ("stream");
|
||||
|
||||
-- Snapshots Table
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "event_store"."snapshots" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"name" varchar NOT NULL,
|
||||
"stream" varchar NOT NULL,
|
||||
"cursor" varchar NOT NULL,
|
||||
"state" jsonb NOT NULL,
|
||||
UNIQUE ("name", "stream")
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "snapshots_name_stream_cursor_index" ON "event_store"."snapshots" USING btree ("name","stream","cursor");
|
||||
```
|
||||
45
adapters/postgres/adapter.ts
Normal file
45
adapters/postgres/adapter.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { EventStoreAdapter } from "../../types/adapter.ts";
|
||||
import { PostgresConnection } from "./connection.ts";
|
||||
import { PostgresDatabase } from "./database.ts";
|
||||
import { PostgresEventsProvider } from "./providers/event.ts";
|
||||
import { PostgresRelationsProvider } from "./providers/relations.ts";
|
||||
import { PostgresSnapshotsProvider } from "./providers/snapshot.ts";
|
||||
|
||||
/**
|
||||
* A server-based event store adapter that integrates database-specific providers.
|
||||
*
|
||||
* The `PostgresAdapter` enables event sourcing in a back end environment by utilizing
|
||||
* PostgreSql for storage. It provides implementations for event storage, relations,
|
||||
* and snapshots, allowing seamless integration with the shared event store interface.
|
||||
*
|
||||
* @template TEvent - The type of events managed by the event store.
|
||||
*/
|
||||
export class PostgresAdapter implements EventStoreAdapter<PostgresDatabase> {
|
||||
readonly providers: {
|
||||
readonly events: PostgresEventsProvider;
|
||||
readonly relations: PostgresRelationsProvider;
|
||||
readonly snapshots: PostgresSnapshotsProvider;
|
||||
};
|
||||
|
||||
#database: PostgresDatabase;
|
||||
|
||||
constructor(
|
||||
readonly connection: PostgresConnection,
|
||||
readonly options: Options = {},
|
||||
) {
|
||||
this.#database = new PostgresDatabase(connection);
|
||||
this.providers = {
|
||||
events: new PostgresEventsProvider(this.#database, options.schema),
|
||||
relations: new PostgresRelationsProvider(this.#database, options.schema),
|
||||
snapshots: new PostgresSnapshotsProvider(this.#database, options.schema),
|
||||
};
|
||||
}
|
||||
|
||||
get db(): PostgresDatabase {
|
||||
return this.#database;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Optional configuration for the postgres adapter.
 */
type Options = {
  // Schema the event store tables live in; when omitted the providers
  // fall back to the `public` schema.
  schema?: string;
};
|
||||
7
adapters/postgres/connection.ts
Normal file
7
adapters/postgres/connection.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import type { Options, Sql } from "postgres";
|
||||
|
||||
/**
 * Accepted forms for configuring the postgres connection: a connection url
 * tuple with optional driver options, an options-only tuple, an already
 * constructed `Sql` instance, or a factory producing one on demand.
 */
export type PostgresConnection = [PostgresConnectionUrl, Options<any>?] | [Options<any>] | Sql | PostgresConnectionFactory;

// Template literal type enforcing a `postgres://user:pass@host:port/database` url shape.
type PostgresConnectionUrl = `postgres://${string}:${string}@${string}:${number}/${string}`;

// Factory invoked lazily to create the `Sql` instance on first use.
type PostgresConnectionFactory = () => Sql;
|
||||
36
adapters/postgres/database.ts
Normal file
36
adapters/postgres/database.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import postgres, { type Sql } from "postgres";
|
||||
|
||||
import { PostgresConnection } from "./connection.ts";
|
||||
|
||||
export class PostgresDatabase {
|
||||
readonly #connection: PostgresConnection;
|
||||
|
||||
#sql?: Sql;
|
||||
|
||||
constructor(connection: PostgresConnection) {
|
||||
this.#connection = connection;
|
||||
}
|
||||
|
||||
get sql(): Sql {
|
||||
if (this.#sql === undefined) {
|
||||
const connection = this.#connection;
|
||||
if (Array.isArray(connection)) {
|
||||
const [urlOrOptions, option] = connection;
|
||||
if (typeof urlOrOptions === "string") {
|
||||
this.#sql = postgres(urlOrOptions, option);
|
||||
} else {
|
||||
this.#sql = postgres(urlOrOptions);
|
||||
}
|
||||
} else if ("options" in connection) {
|
||||
this.#sql = connection;
|
||||
} else {
|
||||
this.#sql = connection();
|
||||
}
|
||||
}
|
||||
return this.#sql;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Minimal surface for consumers that only need raw `Sql` access.
 */
export type DatabaseAccessor = {
  sql: Sql;
};
|
||||
175
adapters/postgres/providers/event.ts
Normal file
175
adapters/postgres/providers/event.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
import type { Helper } from "postgres";
|
||||
|
||||
import type { EventRecord } from "../../../libraries/event.ts";
|
||||
import type { EventsProvider } from "../../../types/adapter.ts";
|
||||
import type { EventReadOptions } from "../../../types/query.ts";
|
||||
import type { PostgresDatabase } from "../database.ts";
|
||||
|
||||
// Wire format for event rows: `data` and `meta` are stored as JSON strings in postgres.
type PGEventRecord = Omit<EventRecord, "data" | "meta"> & { data: string; meta: string };
|
||||
|
||||
export class PostgresEventsProvider implements EventsProvider {
|
||||
constructor(
|
||||
readonly db: PostgresDatabase,
|
||||
readonly schema?: string,
|
||||
) {}
|
||||
|
||||
get table(): Helper<string, []> {
|
||||
if (this.schema !== undefined) {
|
||||
return this.db.sql(`${this.schema}.events`);
|
||||
}
|
||||
return this.db.sql("public.events");
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert a new event record to the events table.
|
||||
*
|
||||
* @param record - Event record to insert.
|
||||
*/
|
||||
async insert(record: EventRecord): Promise<void> {
|
||||
await this.db.sql`INSERT INTO ${this.table} ${this.db.sql(this.#toDriver(record))}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'events.insert' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert many new event records to the events table.
|
||||
*
|
||||
* @param records - Event records to insert.
|
||||
* @param batchSize - Batch size for the insert loop.
|
||||
*/
|
||||
async insertMany(records: EventRecord[], batchSize: number = 1_000): Promise<void> {
|
||||
await this.db.sql
|
||||
.begin(async (sql) => {
|
||||
for (let i = 0; i < records.length; i += batchSize) {
|
||||
await sql`INSERT INTO ${this.table} ${this.db.sql(records.slice(i, i + batchSize).map(this.#toDriver))}`;
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'events.insertMany' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all the events in the events table. Optionally a cursor and direction
|
||||
* can be provided to reduce the list of events returned.
|
||||
*
|
||||
* @param options - Find options.
|
||||
*/
|
||||
async get(options: EventReadOptions): Promise<EventRecord[]> {
|
||||
if (options !== undefined) {
|
||||
const { filter, cursor, direction, limit } = options;
|
||||
return this.db.sql<PGEventRecord[]>`
|
||||
SELECT * FROM ${this.table}
|
||||
WHERE
|
||||
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
|
||||
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
|
||||
ORDER BY created ASC
|
||||
${limit ? this.#withLimit(limit) : this.db.sql``}
|
||||
`.then(this.#fromDriver);
|
||||
}
|
||||
return this.db.sql<PGEventRecord[]>`SELECT * FROM ${this.table} ORDER BY created ASC`.then(this.#fromDriver);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get events within the given stream.
|
||||
*
|
||||
* @param stream - Stream to fetch events for.
|
||||
* @param options - Read options for modifying the result.
|
||||
*/
|
||||
async getByStream(stream: string, { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
|
||||
return this.db.sql<PGEventRecord[]>`
|
||||
SELECT * FROM ${this.table}
|
||||
WHERE
|
||||
stream = ${stream}
|
||||
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
|
||||
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
|
||||
ORDER BY created ASC
|
||||
${limit ? this.#withLimit(limit) : this.db.sql``}
|
||||
`.then(this.#fromDriver);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get events within given list of streams.
|
||||
*
|
||||
* @param streams - Stream to get events for.
|
||||
* @param options - Read options for modifying the result.
|
||||
*/
|
||||
async getByStreams(streams: string[], { filter, cursor, direction, limit }: EventReadOptions = {}): Promise<EventRecord[]> {
|
||||
return this.db.sql<PGEventRecord[]>`
|
||||
SELECT * FROM ${this.table}
|
||||
WHERE
|
||||
stream IN ${this.db.sql(streams)}
|
||||
${filter?.types ? this.#withTypes(filter.types) : this.db.sql``}
|
||||
${cursor ? this.#withCursor(cursor, direction) : this.db.sql``}
|
||||
ORDER BY created ASC
|
||||
${limit ? this.#withLimit(limit) : this.db.sql``}
|
||||
`.then(this.#fromDriver);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single event by its id.
|
||||
*
|
||||
* @param id - Event id.
|
||||
*/
|
||||
async getById(id: string): Promise<EventRecord | undefined> {
|
||||
return this.db.sql<PGEventRecord[]>`SELECT * FROM ${this.table} WHERE id = ${id}`.then(this.#fromDriver).then(([record]) => record);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given event is outdated in relation to the local event data.
|
||||
*/
|
||||
async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
|
||||
const count = await await this.db.sql`
|
||||
SELECT COUNT(*) AS count
|
||||
FROM ${this.table}
|
||||
WHERE
|
||||
stream = ${stream}
|
||||
AND type = ${type}
|
||||
AND created > ${created}
|
||||
`.then((result: any) => Number(result[0]));
|
||||
return count > 0;
|
||||
}
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Utilities
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#withTypes(types: string[]) {
|
||||
return this.db.sql`AND type IN ${this.db.sql(types)}`;
|
||||
}
|
||||
|
||||
#withCursor(cursor: string, direction?: 1 | -1 | "asc" | "desc") {
|
||||
if (direction === "desc" || direction === -1) {
|
||||
return this.db.sql`AND created < ${cursor}`;
|
||||
}
|
||||
return this.db.sql`AND created > ${cursor}`;
|
||||
}
|
||||
|
||||
#withLimit(limit: number) {
|
||||
return this.db.sql`LIMIT ${limit}`;
|
||||
}
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Parsers
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#fromDriver(records: PGEventRecord[]): EventRecord[] {
|
||||
return records.map((record) => {
|
||||
record.data = typeof record.data === "string" ? JSON.parse(record.data) : record.data;
|
||||
record.meta = typeof record.meta === "string" ? JSON.parse(record.meta) : record.meta;
|
||||
return record as unknown as EventRecord;
|
||||
});
|
||||
}
|
||||
|
||||
#toDriver(record: EventRecord): PGEventRecord {
|
||||
return {
|
||||
...record,
|
||||
data: JSON.stringify(record.data),
|
||||
meta: JSON.stringify(record.meta),
|
||||
};
|
||||
}
|
||||
}
|
||||
140
adapters/postgres/providers/relations.ts
Normal file
140
adapters/postgres/providers/relations.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import type { Helper } from "postgres";
|
||||
|
||||
import type { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
|
||||
import type { PostgresDatabase } from "../database.ts";
|
||||
|
||||
export class PostgresRelationsProvider implements RelationsProvider {
|
||||
constructor(
|
||||
readonly db: PostgresDatabase,
|
||||
readonly schema?: string,
|
||||
) {}
|
||||
|
||||
get table(): Helper<string, []> {
|
||||
if (this.schema !== undefined) {
|
||||
return this.db.sql(`${this.schema}.relations`);
|
||||
}
|
||||
return this.db.sql("public.relations");
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle incoming relation operations.
|
||||
*
|
||||
* @param relations - List of relation operations to execute.
|
||||
*/
|
||||
async handle(relations: Relation[]): Promise<void> {
|
||||
await Promise.all([
|
||||
this.insertMany(relations.filter((relation) => relation.op === "insert")),
|
||||
this.removeMany(relations.filter((relation) => relation.op === "remove")),
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add stream to the relations table.
|
||||
*
|
||||
* @param key - Relational key to add stream to.
|
||||
* @param stream - Stream to add to the key.
|
||||
*/
|
||||
async insert(key: string, stream: string): Promise<void> {
|
||||
await this.db.sql`INSERT INTO ${this.table} (key, stream) VALUES (${key}, ${stream}) ON CONFLICT DO NOTHING`.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.insert' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add stream to many relational keys onto the relations table.
|
||||
*
|
||||
* @param relations - Relations to insert.
|
||||
* @param batchSize - Batch size for the insert loop.
|
||||
*/
|
||||
async insertMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
|
||||
await this.db.sql
|
||||
.begin(async (sql) => {
|
||||
for (let i = 0; i < relations.length; i += batchSize) {
|
||||
const values = relations.slice(i, i + batchSize).map(({ key, stream }) => [key, stream]);
|
||||
await sql`INSERT INTO ${this.table} (key, stream) VALUES ${sql(values)} ON CONFLICT DO NOTHING`;
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.insertMany' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of event streams registered under the given relational key.
|
||||
*
|
||||
* @param key - Relational key to get event streams for.
|
||||
*/
|
||||
async getByKey(key: string): Promise<string[]> {
|
||||
return this.db.sql`SELECT stream FROM ${this.table} WHERE key = ${key}`
|
||||
.then((rows) => rows.map(({ stream }) => stream))
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.getByKey' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of event streams registered under the given relational keys.
|
||||
*
|
||||
* @param keys - Relational keys to get event streams for.
|
||||
*/
|
||||
async getByKeys(keys: string[]): Promise<string[]> {
|
||||
return this.db.sql`SELECT DISTINCT stream FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`
|
||||
.then((rows) => rows.map(({ stream }) => stream))
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.getByKeys' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a stream from the relational table.
|
||||
*
|
||||
* @param key - Relational key to remove stream from.
|
||||
* @param stream - Stream to remove from relation.
|
||||
*/
|
||||
async remove(key: string, stream: string): Promise<void> {
|
||||
await this.db.sql`DELETE FROM ${this.table} WHERE key = ${key} AND stream = ${stream}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.remove' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes multiple relational entries.
|
||||
*
|
||||
* @param relations - Relations to remove stream from.
|
||||
* @param batchSize - Batch size for the insert loop.
|
||||
*/
|
||||
async removeMany(relations: RelationPayload[], batchSize: number = 1_000): Promise<void> {
|
||||
await this.db.sql
|
||||
.begin(async (sql) => {
|
||||
for (let i = 0; i < relations.length; i += batchSize) {
|
||||
const conditions = relations.slice(i, i + batchSize).map(({ key, stream }) => `(key = '${key}' AND stream = '${stream}')`);
|
||||
await sql`DELETE FROM ${this.table} WHERE ${this.db.sql.unsafe(conditions.join(" OR "))}`;
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.removeMany' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all relations bound to the given relational keys.
|
||||
*
|
||||
* @param keys - Relational keys to remove from the relational table.
|
||||
*/
|
||||
async removeByKeys(keys: string[]): Promise<void> {
|
||||
await this.db.sql`DELETE FROM ${this.table} WHERE key IN ${this.db.sql(keys)}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.removeByKeys' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all relations bound to the given streams.
|
||||
*
|
||||
* @param streams - Streams to remove from the relational table.
|
||||
*/
|
||||
async removeByStreams(streams: string[]): Promise<void> {
|
||||
await this.db.sql`DELETE FROM ${this.table} WHERE stream IN ${this.db.sql(streams)}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'relations.removeByStreams' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
}
|
||||
82
adapters/postgres/providers/snapshot.ts
Normal file
82
adapters/postgres/providers/snapshot.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import type { Helper } from "postgres";
|
||||
|
||||
import type { Snapshot, SnapshotsProvider } from "../../../types/adapter.ts";
|
||||
import type { PostgresDatabase } from "../database.ts";
|
||||
|
||||
// Wire format for snapshot rows: `state` is stored as a JSON string in postgres.
type PGSnapshot = Omit<Snapshot, "state"> & { state: string };
|
||||
|
||||
export class PostgresSnapshotsProvider implements SnapshotsProvider {
|
||||
constructor(
|
||||
readonly db: PostgresDatabase,
|
||||
readonly schema?: string,
|
||||
) {}
|
||||
|
||||
get table(): Helper<string, []> {
|
||||
if (this.schema !== undefined) {
|
||||
return this.db.sql(`${this.schema}.snapshots`);
|
||||
}
|
||||
return this.db.sql("public.snapshots");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add snapshot state under given reducer stream.
|
||||
*
|
||||
* @param name - Name of the reducer the snapshot is attached to.
|
||||
* @param stream - Stream the snapshot is attached to.
|
||||
* @param cursor - Cursor timestamp for the last event used in the snapshot.
|
||||
* @param state - State of the reduced events.
|
||||
*/
|
||||
async insert(name: string, stream: string, cursor: string, state: any): Promise<void> {
|
||||
await this.db.sql`
|
||||
INSERT INTO ${this.table} ${this.db.sql(this.#toDriver({ name, stream, cursor, state }))}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'snapshots.insert' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get snapshot state by stream.
|
||||
*
|
||||
* @param name - Name of the reducer which the state was created.
|
||||
* @param stream - Stream the state was reduced for.
|
||||
*/
|
||||
async getByStream(name: string, stream: string): Promise<Snapshot | undefined> {
|
||||
return this.db.sql<PGSnapshot[]>`SELECT * FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`
|
||||
.then(this.#fromDriver)
|
||||
.then(([snapshot]) => snapshot)
|
||||
.catch((error) => {
|
||||
throw new Error(`EventStore > 'snapshots.getByStream' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a snapshot for the given reducer stream.
|
||||
*
|
||||
* @param name - Name of the reducer the snapshot is attached to.
|
||||
* @param stream - Stream to remove from snapshots.
|
||||
*/
|
||||
async remove(name: string, stream: string): Promise<void> {
|
||||
await this.db.sql`DELETE FROM ${this.table} WHERE name = ${name} AND stream = ${stream}`.catch((error) => {
|
||||
throw new Error(`EventStore > 'snapshots.remove' failed with postgres error: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Parsers
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#fromDriver(snapshots: PGSnapshot[]): Snapshot[] {
|
||||
return snapshots.map((snapshot) => {
|
||||
snapshot.state = typeof snapshot.state === "string" ? JSON.parse(snapshot.state) : snapshot.state;
|
||||
return snapshot as unknown as Snapshot;
|
||||
});
|
||||
}
|
||||
|
||||
#toDriver(snapshot: Snapshot): object {
|
||||
return {
|
||||
...snapshot,
|
||||
state: JSON.stringify(snapshot.state),
|
||||
};
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user