feat: version 2 beta
91
adapters/mongo/adapter.ts
Normal file
@@ -0,0 +1,91 @@
import type { MongoConnectionUrl } from "@valkyr/testcontainers/mongodb";
import { Db, MongoClient } from "mongodb";

import { EventStoreAdapter } from "../../types/adapter.ts";
import { registrars } from "./collections/mod.ts";
import { MongoEventsProvider } from "./providers/events.ts";
import { MongoRelationsProvider } from "./providers/relations.ts";
import { MongoSnapshotsProvider } from "./providers/snapshots.ts";
import { DatabaseAccessor } from "./types.ts";
import { getCollectionsSet } from "./utilities.ts";

/**
 * A server-based event store adapter that integrates database-specific providers.
 *
 * The `MongoAdapter` enables event sourcing in a back-end environment by utilizing
 * MongoDB for storage. It provides implementations for event storage, relations,
 * and snapshots, allowing seamless integration with the shared event store interface.
 */
export class MongoAdapter implements EventStoreAdapter<DatabaseAccessor> {
  readonly providers: {
    readonly events: MongoEventsProvider;
    readonly relations: MongoRelationsProvider;
    readonly snapshots: MongoSnapshotsProvider;
  };

  readonly #accessor: DatabaseAccessor;

  constructor(connection: MongoConnection, db: string) {
    this.#accessor = getDatabaseAccessor(connection, db);
    this.providers = {
      events: new MongoEventsProvider(this.#accessor),
      relations: new MongoRelationsProvider(this.#accessor),
      snapshots: new MongoSnapshotsProvider(this.#accessor),
    };
  }

  get db(): DatabaseAccessor {
    return this.#accessor;
  }
}

/**
 * Takes a mongo database and registers the event store collections and
 * indexes defined internally.
 *
 * @param db - Mongo database to register event store collections against.
 * @param logger - Logger method to print internal logs.
 */
export async function register(db: Db, logger?: (...args: any[]) => any) {
  const list = await getCollectionsSet(db);
  for (const { name, indexes } of registrars) {
    if (list.has(name)) {
      continue;
    }
    await db.createCollection(name);
    for (const [indexSpec, options] of indexes) {
      await db.collection(name).createIndex(indexSpec, options);
      logger?.("Mongo Event Store > Collection '%s' is indexed [%O] with options %O", name, indexSpec, options ?? {});
    }
    logger?.("Mongo Event Store > Collection '%s' is registered", name);
  }
}
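
A minimal usage sketch for `register`, assuming a reachable MongoDB instance; the connection url, database name, and import path below are placeholders:

import { MongoClient } from "mongodb";
import { register } from "./adapters/mongo/adapter.ts"; // hypothetical import path

// Ensure the event store collections and indexes exist before serving requests.
const client = new MongoClient("mongodb://localhost:27017"); // placeholder url
await register(client.db("event-store"), console.log); // database name is an assumption
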
function getDatabaseAccessor(connection: MongoConnection, database: string): DatabaseAccessor {
  let instance: Db | undefined;
  return {
    get db(): Db {
      if (instance === undefined) {
        instance = this.client.db(database);
      }
      return instance;
    },
    get client(): MongoClient {
      if (typeof connection === "string") {
        return new MongoClient(connection);
      }
      if (connection instanceof MongoClient) {
        return connection;
      }
      return connection();
    },
  };
}

/**
 * Connection which the adapter supports. This can be a `url`, a `client` instance,
 * or a lazy method that provides a `client` instance on demand.
 */
export type MongoConnection = MongoConnectionUrl | MongoClient | (() => MongoClient);
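
A construction sketch covering the three `MongoConnection` forms, assuming a placeholder url, database name, and import path:

import { MongoClient } from "mongodb";
import { MongoAdapter } from "./adapters/mongo/adapter.ts"; // hypothetical import path

// From a connection url, an existing client, or a lazy client factory.
const fromUrl = new MongoAdapter("mongodb://localhost:27017", "event-store");
const fromClient = new MongoAdapter(new MongoClient("mongodb://localhost:27017"), "event-store");
const fromFactory = new MongoAdapter(() => new MongoClient("mongodb://localhost:27017"), "event-store");
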
49
adapters/mongo/collections/events.ts
Normal file
@@ -0,0 +1,49 @@
import z from "zod";

import type { CollectionRegistrar } from "../types.ts";

export const registrar: CollectionRegistrar = {
  name: "events",
  indexes: [
    [
      {
        stream: 1,
      },
    ],
    [
      {
        type: 1,
      },
    ],
    [
      {
        recorded: 1,
      },
    ],
    [
      {
        created: 1,
      },
    ],
  ],
};

export const schema = z.object({
  id: z.string(),
  stream: z.string(),
  type: z.string(),
  data: z.any(),
  meta: z.any(),
  recorded: z.string(),
  created: z.string(),
});

export type EventSchema = {
  id: string;
  stream: string;
  type: string;
  data: Record<string, any> | null;
  meta: Record<string, any> | null;
  recorded: string;
  created: string;
};
10
adapters/mongo/collections/mod.ts
Normal file
@@ -0,0 +1,10 @@
import { CollectionRegistrar } from "../types.ts";
import { registrar as events } from "./events.ts";
import { registrar as relations } from "./relations.ts";
import { registrar as snapshots } from "./snapshots.ts";

export const registrars: CollectionRegistrar[] = [
  events,
  relations,
  snapshots,
];
38
adapters/mongo/collections/relations.ts
Normal file
@@ -0,0 +1,38 @@
import z from "zod";

import type { CollectionRegistrar } from "../types.ts";

export const registrar: CollectionRegistrar = {
  name: "relations",
  indexes: [
    [
      {
        key: 1,
      },
    ],
    [
      {
        stream: 1,
      },
    ],
    [
      {
        key: 1,
        stream: 1,
      },
      {
        unique: true,
      },
    ],
  ],
};

export const schema = z.object({
  key: z.string(),
  streams: z.string().array(),
});

export type RelationSchema = {
  key: string;
  streams: string[];
};
30
adapters/mongo/collections/snapshots.ts
Normal file
@@ -0,0 +1,30 @@
import z from "zod";

import type { CollectionRegistrar } from "../types.ts";

export const registrar: CollectionRegistrar = {
  name: "snapshots",
  indexes: [
    [
      {
        name: 1,
        stream: 1,
        cursor: 1,
      },
    ],
  ],
};

export const schema = z.object({
  name: z.string(),
  stream: z.string(),
  cursor: z.string(),
  state: z.record(z.string(), z.any()),
});

export type SnapshotSchema = {
  name: string;
  stream: string;
  cursor: string;
  state: Record<string, any>;
};
131
adapters/mongo/providers/events.ts
Normal file
@@ -0,0 +1,131 @@
import type { Collection, FindCursor } from "mongodb";

import { EventRecord } from "../../../libraries/event.ts";
import type { EventsProvider } from "../../../types/adapter.ts";
import type { EventReadOptions } from "../../../types/query.ts";
import { type EventSchema, schema } from "../collections/events.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";

export class MongoEventsProvider implements EventsProvider {
  readonly #accessor: DatabaseAccessor;

  constructor(accessor: DatabaseAccessor) {
    this.#accessor = accessor;
  }

  get collection(): Collection<EventSchema> {
    return this.#accessor.db.collection<EventSchema>("events");
  }

  /**
   * Insert a new event record to the events table.
   *
   * @param record - Event record to insert.
   */
  async insert(record: EventRecord): Promise<void> {
    await this.collection.insertOne(record, { forceServerObjectId: true });
  }

  /**
   * Insert many new event records to the events table.
   *
   * @param records - Event records to insert.
   */
  async insertMany(records: EventRecord[]): Promise<void> {
    await this.collection.insertMany(records, { forceServerObjectId: true });
  }

  /**
   * Retrieve all the events in the events table. Optionally a cursor and direction
   * can be provided to reduce the list of events returned.
   *
   * @param options - Find options.
   */
  async get(options: EventReadOptions = {}): Promise<EventRecord[]> {
    return (await this.#withReadOptions(this.collection.find(this.#withFilters(options)), options)
      .sort({ created: 1 })
      .toArray()
      .then(toParsedRecords(schema))) as EventRecord[];
  }

  /**
   * Get events within the given stream.
   *
   * @param stream - Stream to fetch events for.
   * @param options - Read options for modifying the result.
   */
  async getByStream(stream: string, options: EventReadOptions = {}): Promise<EventRecord[]> {
    return (await this.#withReadOptions(this.collection.find({ stream, ...this.#withFilters(options) }), options)
      .sort({ created: 1 })
      .toArray()
      .then(toParsedRecords(schema))) as EventRecord[];
  }

  /**
   * Get events within the given list of streams.
   *
   * @param streams - Streams to get events for.
   * @param options - Read options for modifying the result.
   */
  async getByStreams(streams: string[], options: EventReadOptions = {}): Promise<EventRecord[]> {
    return (await this.#withReadOptions(this.collection.find({ stream: { $in: streams }, ...this.#withFilters(options) }), options)
      .sort({ created: 1 })
      .toArray()
      .then(toParsedRecords(schema))) as EventRecord[];
  }

  /**
   * Get a single event by its id.
   *
   * @param id - Event id.
   */
  async getById(id: string): Promise<EventRecord | undefined> {
    return (await this.collection.findOne({ id }).then(toParsedRecord(schema))) as EventRecord | undefined;
  }

  /**
   * Check if the given event is outdated in relation to the local event data.
   *
   * @param event - Event record to check the outdated state for.
   */
  async checkOutdated({ stream, type, created }: EventRecord): Promise<boolean> {
    const count = await this.collection.countDocuments({
      stream,
      type,
      created: {
        $gt: created,
      },
    });
    return count > 0;
  }

  /*
   |--------------------------------------------------------------------------------
   | Utilities
   |--------------------------------------------------------------------------------
   */

  #withFilters({ filter }: EventReadOptions): { type?: { $in: string[] } } {
    const types = filter?.types;
    if (types !== undefined) {
      return { type: { $in: types } };
    }
    return {};
  }

  #withReadOptions(fc: FindCursor, { cursor, direction, limit }: EventReadOptions): FindCursor {
    if (cursor !== undefined) {
      if (direction === "desc" || direction === -1) {
        fc.filter({ created: { $lt: cursor } });
      } else {
        fc.filter({ created: { $gt: cursor } });
      }
    }
    if (limit !== undefined) {
      fc.limit(limit);
    }
    return fc;
  }
}
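
A read sketch against the events provider, assuming an `adapter` instance built as in adapter.ts; the event type, cursor, and limit values are illustrative:

const { events } = adapter.providers;

// Events of a given type created before the cursor; results stay sorted ascending by `created`.
const records = await events.get({
  filter: { types: ["user:created"] }, // hypothetical event type
  cursor: "2024-01-01T00:00:00.000Z",  // placeholder cursor value
  direction: "desc",
  limit: 100,
});
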
168
adapters/mongo/providers/relations.ts
Normal file
@@ -0,0 +1,168 @@
import type { Collection } from "mongodb";

import { Relation, RelationPayload, RelationsProvider } from "../../../types/adapter.ts";
import { type RelationSchema, schema } from "../collections/relations.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord, toParsedRecords } from "../utilities.ts";

export class MongoRelationsProvider implements RelationsProvider {
  readonly #accessor: DatabaseAccessor;

  constructor(accessor: DatabaseAccessor) {
    this.#accessor = accessor;
  }

  get collection(): Collection<RelationSchema> {
    return this.#accessor.db.collection<RelationSchema>("relations");
  }

  /**
   * Handle incoming relation operations.
   *
   * @param relations - List of relation operations to execute.
   */
  async handle(relations: Relation[]): Promise<void> {
    await Promise.all([
      this.insertMany(relations.filter((relation) => relation.op === "insert")),
      this.removeMany(relations.filter((relation) => relation.op === "remove")),
    ]);
  }

  /**
   * Add stream to the relations table.
   *
   * @param key - Relational key to add stream to.
   * @param stream - Stream to add to the key.
   */
  async insert(key: string, stream: string): Promise<void> {
    await this.collection.updateOne({ key }, { $addToSet: { streams: stream } }, { upsert: true });
  }

  /**
   * Add streams to many relational keys in the relations table.
   *
   * @param relations - Relations to insert.
   * @param batchSize - Maximum number of bulk operations per write. (Optional)
   */
  async insertMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
    const reduced = relations.reduce((map, { key, stream }) => {
      if (map.has(key) === false) {
        map.set(key, new Set<string>());
      }
      map.get(key)!.add(stream);
      return map;
    }, new Map<string, Set<string>>());

    const bulkOps = [];
    for (const [key, streams] of reduced) {
      bulkOps.push({
        updateOne: {
          filter: { key },
          update: { $addToSet: { streams: { $each: Array.from(streams) } } },
          upsert: true,
        },
      });
    }

    for (let i = 0; i < bulkOps.length; i += batchSize) {
      await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
    }
  }

  /**
   * Get a list of event streams registered under the given relational key.
   *
   * @param key - Relational key to get event streams for.
   */
  async getByKey(key: string): Promise<string[]> {
    const relations = await this.collection.findOne({ key }).then(toParsedRecord(schema));
    if (relations === undefined) {
      return [];
    }
    return relations.streams;
  }

  /**
   * Get a list of event streams registered under the given relational keys.
   *
   * @param keys - Relational keys to get event streams for.
   */
  async getByKeys(keys: string[]): Promise<string[]> {
    const streams = new Set<string>();

    const documents = await this.collection
      .find({ key: { $in: keys } })
      .toArray()
      .then(toParsedRecords(schema));
    documents.forEach((document) => {
      for (const stream of document.streams) {
        streams.add(stream);
      }
    });

    return Array.from(streams);
  }

  /**
   * Removes a stream from the relational table.
   *
   * @param key - Relational key to remove stream from.
   * @param stream - Stream to remove from relation.
   */
  async remove(key: string, stream: string): Promise<void> {
    await this.collection.updateOne({ key }, { $pull: { streams: stream } });
  }

  /**
   * Removes multiple relational entries.
   *
   * @param relations - Relations to remove streams from.
   * @param batchSize - Maximum number of bulk operations per write. (Optional)
   */
  async removeMany(relations: RelationPayload[], batchSize = 1_000): Promise<void> {
    const reduced = relations.reduce((map, { key, stream }) => {
      if (!map.has(key)) {
        map.set(key, new Set());
      }
      map.get(key)!.add(stream);
      return map;
    }, new Map<string, Set<string>>());

    const bulkOps = [];
    for (const [key, streams] of reduced) {
      bulkOps.push({
        updateOne: {
          filter: { key },
          update: { $pull: { streams: { $in: Array.from(streams) } } },
        },
      });
    }

    for (let i = 0; i < bulkOps.length; i += batchSize) {
      await this.collection.bulkWrite(bulkOps.slice(i, i + batchSize), { ordered: false });
    }
  }

  /**
   * Remove all relations bound to the given relational keys.
   *
   * @param keys - Relational keys to remove from the relational table.
   */
  async removeByKeys(keys: string[]): Promise<void> {
    await this.collection.deleteMany({ key: { $in: keys } });
  }

  /**
   * Remove all relations bound to the given streams.
   *
   * @param streams - Streams to remove from the relational table.
   */
  async removeByStreams(streams: string[]): Promise<void> {
    await this.collection.bulkWrite(
      streams.map((stream) => ({
        updateOne: {
          filter: { streams: stream },
          update: { $pull: { streams: stream } },
        },
      })),
    );
  }
}
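
A sketch of relation operations, assuming an `adapter` instance and that `Relation` carries `op`, `key`, and `stream` fields as the provider code suggests; the keys and streams are placeholders:

const { relations } = adapter.providers;

await relations.handle([
  { op: "insert", key: "tenant:acme", stream: "stream-1" },
  { op: "insert", key: "tenant:acme", stream: "stream-2" },
  { op: "remove", key: "tenant:acme", stream: "stream-3" },
]);

const streams = await relations.getByKey("tenant:acme"); // e.g. ["stream-1", "stream-2"]
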
50
adapters/mongo/providers/snapshots.ts
Normal file
@@ -0,0 +1,50 @@
import type { Collection } from "mongodb";

import { SnapshotsProvider } from "../../../types/adapter.ts";
import { schema, type SnapshotSchema } from "../collections/snapshots.ts";
import { DatabaseAccessor } from "../types.ts";
import { toParsedRecord } from "../utilities.ts";

export class MongoSnapshotsProvider implements SnapshotsProvider {
  readonly #accessor: DatabaseAccessor;

  constructor(accessor: DatabaseAccessor) {
    this.#accessor = accessor;
  }

  get collection(): Collection<SnapshotSchema> {
    return this.#accessor.db.collection<SnapshotSchema>("snapshots");
  }

  /**
   * Add snapshot state under the given reducer stream.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream the snapshot is attached to.
   * @param cursor - Cursor timestamp for the last event used in the snapshot.
   * @param state - State of the reduced events.
   */
  async insert(name: string, stream: string, cursor: string, state: Record<string, unknown>): Promise<void> {
    await this.collection.updateOne({ name }, { $set: { stream, cursor, state } }, { upsert: true });
  }

  /**
   * Get snapshot state by stream.
   *
   * @param name - Name of the reducer for which the state was created.
   * @param stream - Stream the state was reduced for.
   */
  async getByStream(name: string, stream: string): Promise<SnapshotSchema | undefined> {
    return this.collection.findOne({ name, stream }).then(toParsedRecord(schema));
  }

  /**
   * Removes a snapshot for the given reducer stream.
   *
   * @param name - Name of the reducer the snapshot is attached to.
   * @param stream - Stream to remove from snapshots.
   */
  async remove(name: string, stream: string): Promise<void> {
    await this.collection.deleteOne({ name, stream });
  }
}
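
A snapshot round-trip sketch, assuming an `adapter` instance; the reducer name, stream, cursor, and state values are placeholders:

const { snapshots } = adapter.providers;

await snapshots.insert("user", "stream-1", "2024-01-01T00:00:00.000Z", { name: "Jane Doe" });

const snapshot = await snapshots.getByStream("user", "stream-1");
if (snapshot !== undefined) {
  console.log(snapshot.cursor, snapshot.state); // last cursor and reduced state
}

await snapshots.remove("user", "stream-1");
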
11
adapters/mongo/types.ts
Normal file
@@ -0,0 +1,11 @@
import type { CreateIndexesOptions, Db, IndexSpecification, MongoClient } from "mongodb";

export type CollectionRegistrar = {
  name: string;
  indexes: [IndexSpecification, CreateIndexesOptions?][];
};

export type DatabaseAccessor = {
  db: Db;
  client: MongoClient;
};
43
adapters/mongo/utilities.ts
Normal file
@@ -0,0 +1,43 @@
import type { Db, WithId } from "mongodb";
import type { z, ZodObject } from "zod";

/**
 * Take a list of records and run it through the given zod parser. This
 * ensures that all the documents in the resulting list adhere to the
 * expected schematics before being returned. Any deviation in the list
 * will result in an internal error being thrown.
 *
 * @param parser - Zod parser to run the documents through.
 */
export function toParsedRecords<TSchema extends ZodObject>(parser: TSchema): (documents: WithId<object>[]) => z.infer<TSchema>[] {
  return parser.array().parse;
}

/**
 * Take a single nullable document value and run it through the given zod
 * parser. This ensures that the data adheres to the expected schematics
 * before being returned. Any deviation in the expected response will result
 * in an internal error being thrown.
 *
 * @param parser - Zod parser to run the document through.
 */
export function toParsedRecord<TSchema extends ZodObject>(parser: TSchema): (document: WithId<object> | null) => z.infer<TSchema> | undefined {
  return function (document) {
    if (document === null) {
      return undefined;
    }
    return parser.parse(document);
  };
}

/**
 * Get a Set of collections that exist on a given mongo database instance.
 *
 * @param db - Mongo database to fetch collection list for.
 */
export async function getCollectionsSet(db: Db) {
  return db
    .listCollections()
    .toArray()
    .then((collections) => new Set(collections.map((c) => c.name)));
}
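
A brief sketch of how the parsing helpers compose with collection reads, assuming the events `schema` from ../collections/events.ts and a `collection` handle like the ones exposed by the providers:

const record = await collection.findOne({ id: "event-1" }).then(toParsedRecord(schema)); // parsed document or undefined
const records = await collection.find({ stream: "stream-1" }).toArray().then(toParsedRecords(schema)); // parsed and validated list
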