feat: release 3.0.0
@@ -1,6 +1,6 @@
 {
   "name": "@valkyr/db",
-  "version": "2.1.0",
+  "version": "3.0.0",
   "exports": {
     ".": "./src/mod.ts"
   },
@@ -24,7 +24,7 @@
   },
 
   "test": {
-    "command": "deno test --allow-all ./src",
+    "command": "deno test --allow-all",
     "description": "Run all tests using Deno’s built-in test runner."
   },
deno.lock (generated)
@@ -7,11 +7,11 @@
    "npm:@jsr/std__testing@1": "1.0.16",
    "npm:@jsr/valkyr__event-emitter@1.0.1": "1.0.1",
    "npm:@jsr/valkyr__testcontainers@2": "2.0.2",
    "npm:@types/node@*": "24.2.0",
    "npm:expect@30.2.0": "30.2.0",
    "npm:fake-indexeddb@6.2.5": "6.2.5",
    "npm:idb@8.0.3": "8.0.3",
    "npm:mingo@7.1.1": "7.1.1",
    "npm:sorted-btree@2.1.0": "2.1.0",
    "npm:zod@4.3.4": "4.3.4"
  },
  "npm": {
@@ -428,9 +428,6 @@
    "slash@3.0.0": {
      "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="
    },
    "sorted-btree@2.1.0": {
      "integrity": "sha512-AtYXy3lL+5jrATpbymC2bM8anN/3maLkmVCd94MzypnKjokfCid/zeS3rvXedv7W6ffSfqKIGdz3UaJPWRBZ0g=="
    },
    "sparse-bitfield@3.0.3": {
      "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
      "dependencies": [
@@ -491,7 +488,6 @@
        "npm:fake-indexeddb@6.2.5",
        "npm:idb@8.0.3",
        "npm:mingo@7.1.1",
        "npm:sorted-btree@2.1.0",
        "npm:zod@4.3.4"
      ]
    }
@@ -4,10 +4,10 @@ import type { Modifier } from "mingo/updater";
 import type { ZodObject, ZodRawShape } from "zod";
 import z from "zod";
 
+import type { IndexSpec } from "./index/manager.ts";
 import { observe, observeOne } from "./observe/mod.ts";
-import type { Index } from "./registrars.ts";
 import type { ChangeEvent, QueryOptions, Storage, UpdateResult } from "./storage.ts";
-import type { AnyDocument } from "./types.ts";
+import type { AnyDocument, QueryCriteria } from "./types.ts";
 
 /*
 |--------------------------------------------------------------------------------
@@ -44,8 +44,8 @@ export class Collection<
 
   get primaryKey(): string {
     for (const index of this.options.indexes ?? []) {
-      if (index[1]?.primary === true) {
-        return index[0] as string;
+      if (index.kind === "primary") {
+        return index.field;
       }
     }
     throw new Error(`Collection '${this.name}' is missing required primary key assignment.`);
@@ -57,7 +57,7 @@ export class Collection<
 |--------------------------------------------------------------------------------
 */
 
-  getPrimaryKeyValue(document: AnyDocument): string | number {
+  getPrimaryKeyValue(document: AnyDocument): string {
     const id = document[this.#pkey];
     if (id === undefined || typeof id !== "string") {
       throw new Error(
@@ -74,7 +74,9 @@ export class Collection<
   */
 
   async insert(documents: TSchema[]): Promise<void> {
-    return this.storage.resolve().then((storage) => storage.insert(documents));
+    return this.storage
+      .resolve()
+      .then((storage) => storage.insert(documents.map((document) => this.#schema.parse(document))));
   }
 
   async update(
@@ -122,7 +124,7 @@ export class Collection<
   * Performs a mingo filter search over the collection data and returns
   * a single document if one was found matching the filter and options.
   */
-  async findOne(condition: Criteria<TSchema> = {}, options: QueryOptions = {}): Promise<TSchema | undefined> {
+  async findOne(condition: QueryCriteria<TSchema> = {}, options: QueryOptions = {}): Promise<TSchema | undefined> {
     return this.findMany(condition, { ...options, limit: 1 }).then(([document]) => document);
   }
 
@@ -130,7 +132,7 @@ export class Collection<
   * Performs a mingo filter search over the collection data and returns any
   * documents matching the provided filter and options.
   */
-  async findMany(condition: Criteria<TSchema> = {}, options?: QueryOptions): Promise<TSchema[]> {
+  async findMany(condition: QueryCriteria<TSchema> = {}, options?: QueryOptions): Promise<TSchema[]> {
     return this.storage
       .resolve()
       .then((storage) =>
@@ -144,17 +146,17 @@ export class Collection<
   * Performs a mingo filter search over the collection data and returns
   * the count of all documents found matching the filter and options.
   */
-  async count(condition?: Criteria<TSchema>): Promise<number> {
-    return this.storage.resolve().then((storage) => storage.count({ collection: this.options.name, condition }));
+  async count(condition: Criteria<TSchema> = {}): Promise<number> {
+    return this.storage.resolve().then((storage) => storage.count(condition));
   }
 
   /**
   * Removes all documents from the storage instance.
   */
-  flush(): void {
-    this.storage.resolve().then((storage) => {
+  async flush(): Promise<void> {
+    await this.storage.resolve().then(async (storage) => {
+      await storage.flush();
       storage.broadcast("flush");
-      storage.flush();
     });
   }
 
@@ -216,5 +218,5 @@ type CollectionOptions<TName extends string, TStorage extends Storage, TSchema e
   /**
   * List of custom indexes for the collection.
   */
-  indexes: Index[];
+  indexes: IndexSpec<TSchema>[];
 };
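For reference, a minimal sketch of how the new object-style index specs and the QueryCriteria-typed query conditions fit together. This is illustrative only and not part of the diff; the import paths and the User shape are assumptions, and the spec layout follows the registrar shape used by the new tests in this commit:

import type { IndexSpec } from "./src/index/manager.ts";
import type { QueryCriteria } from "./src/types.ts";

type User = { id: string; email: string; group: string };

// 2.x declared indexes as tuples such as ["id", { primary: true }];
// 3.0.0 replaces them with explicit index specs:
const indexes: IndexSpec<User>[] = [
  { field: "id", kind: "primary" },
  { field: "email", kind: "unique" },
  { field: "group", kind: "shared" },
];

// Conditions are now typed as QueryCriteria, so logical operators are part of the type:
const staffOrAdmins: QueryCriteria<User> = { $or: [{ group: "staff" }, { group: "admin" }] };

// e.g. await collection.findMany(staffOrAdmins, { limit: 10 });

Note that Collection.insert() now runs every document through the collection's Zod schema before it reaches storage, so invalid documents are rejected at insert time.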
@@ -1,8 +1,9 @@
 import { type IDBPDatabase, openDB } from "idb";
 
 import { Collection } from "../../collection.ts";
+import type { IndexSpec } from "../../index/manager.ts";
 import type { DBLogger } from "../../logger.ts";
-import type { Index, Registrars } from "../../registrars.ts";
+import type { Registrars } from "../../registrars.ts";
 import { IndexedDBStorage } from "./storage.ts";
 
 export class IndexedDB<TOptions extends IndexedDBOptions> {
@@ -12,10 +13,22 @@ export class IndexedDB<TOptions extends IndexedDBOptions> {
   constructor(readonly options: TOptions) {
     this.#db = openDB(options.name, options.version ?? 1, {
       upgrade: (db: IDBPDatabase) => {
-        for (const { name, indexes = [] } of options.registrars) {
-          const store = db.createObjectStore(name);
-          for (const [keyPath, options] of indexes) {
-            store.createIndex(keyPath, keyPath, options);
+        for (const { name, indexes } of options.registrars) {
+          const store = db.createObjectStore(name, {
+            keyPath: indexes.find((index: IndexSpec<any>) => index.kind === "primary")?.field,
+          });
+          for (const { field, kind } of indexes) {
+            switch (kind) {
+              case "primary":
+              case "unique": {
+                store.createIndex(field, field, { unique: true });
+                break;
+              }
+              case "shared": {
+                store.createIndex(field, field);
+                break;
+              }
+            }
           }
         }
       },
@@ -48,7 +61,7 @@ export class IndexedDB<TOptions extends IndexedDBOptions> {
     name: TName;
     storage: IndexedDBStorage;
     schema: TSchema;
-    indexes: Index[];
+    indexes: IndexSpec<any>[];
   }> {
     const collection = this.#collections.get(name);
     if (collection === undefined) {
@@ -73,8 +86,8 @@ export class IndexedDB<TOptions extends IndexedDBOptions> {
     }
   }
 
-  close() {
-    this.#db.then((db) => db.close());
+  async close() {
+    await this.#db.then((db) => db.close());
   }
 }
@@ -3,8 +3,8 @@ import { Query, update } from "mingo";
 import type { Criteria } from "mingo/types";
 import type { Modifier } from "mingo/updater";
 
+import type { IndexSpec } from "../../index/manager.ts";
 import { type DBLogger, InsertLog, QueryLog, RemoveLog, UpdateLog } from "../../logger.ts";
-import type { Index } from "../../registrars.ts";
 import { addOptions, type QueryOptions, Storage, type UpdateResult } from "../../storage.ts";
 import type { AnyDocument } from "../../types.ts";
 import { IndexedDBCache } from "./cache.ts";
@@ -21,7 +21,7 @@ export class IndexedDBStorage<TSchema extends AnyDocument = AnyDocument> extends
 
   constructor(
     name: string,
-    indexes: Index[],
+    indexes: IndexSpec<TSchema>[],
     promise: Promise<IDBPDatabase>,
     readonly log: DBLogger = function log() {},
   ) {
@@ -29,21 +29,6 @@ export class IndexedDBStorage<TSchema extends AnyDocument = AnyDocument> extends
     this.#promise = promise;
   }
 
-  async resolve() {
-    if (this.#db === undefined) {
-      this.#db = await this.#promise;
-    }
-    return this;
-  }
-
-  async has(id: string): Promise<boolean> {
-    const document = await this.db.getFromIndex(this.name, "id", id);
-    if (document !== undefined) {
-      return true;
-    }
-    return false;
-  }
-
   get db() {
     if (this.#db === undefined) {
       throw new Error("Database not initialized");
@@ -51,6 +36,13 @@ export class IndexedDBStorage<TSchema extends AnyDocument = AnyDocument> extends
     return this.#db;
   }
 
+  async resolve() {
+    if (this.#db === undefined) {
+      this.#db = await this.#promise;
+    }
+    return this;
+  }
+
   /*
   |--------------------------------------------------------------------------------
   | Insert
@@ -1,5 +1,6 @@
 import { Collection } from "../../collection.ts";
-import type { Index, Registrars } from "../../registrars.ts";
+import type { IndexSpec } from "../../index/manager.ts";
+import type { Registrars } from "../../registrars.ts";
 import { MemoryStorage } from "./storage.ts";
 
 export class MemoryDatabase<TOptions extends MemoryDatabaseOptions> {
@@ -42,7 +43,7 @@ export class MemoryDatabase<TOptions extends MemoryDatabaseOptions> {
     name: TName;
     storage: MemoryStorage;
     schema: TSchema;
-    indexes: Index[];
+    indexes: IndexSpec<any>[];
   }> {
     const collection = this.#collections.get(name);
     if (collection === undefined) {
@@ -5,18 +5,18 @@ import type { Modifier } from "mingo/updater";
 import { IndexManager, type IndexSpec } from "../../index/manager.ts";
 import type { UpdateResult } from "../../storage.ts";
 import { addOptions, type QueryOptions, Storage } from "../../storage.ts";
-import type { AnyDocument } from "../../types.ts";
+import type { AnyDocument, StringKeyOf } from "../../types.ts";
 
 export class MemoryStorage<TSchema extends AnyDocument = AnyDocument> extends Storage<TSchema> {
   readonly index: IndexManager<TSchema>;
 
-  constructor(name: string, indexes: IndexSpec[]) {
+  constructor(name: string, indexes: IndexSpec<TSchema>[]) {
     super(name, indexes);
     this.index = new IndexManager(indexes);
   }
 
   get documents() {
-    return this.index.primary.tree;
+    return this.index.primary.documents;
   }
 
   async resolve() {
@@ -30,14 +30,14 @@ export class MemoryStorage<TSchema extends AnyDocument = AnyDocument> extends St
     this.broadcast("insert", documents);
   }
 
-  async getByIndex(index: string, value: string): Promise<TSchema[]> {
-    return this.index.get(index)?.get(value) ?? [];
+  async getByIndex(field: StringKeyOf<TSchema>, value: string): Promise<TSchema[]> {
+    return this.index.getByIndex(field, value);
   }
 
   async find(condition: Criteria<TSchema> = {}, options?: QueryOptions): Promise<TSchema[]> {
-    let cursor = new Query(condition).find<TSchema>(this.documents);
+    const cursor = new Query(condition).find<TSchema>(this.documents);
     if (options !== undefined) {
-      cursor = addOptions(cursor, options);
+      return addOptions(cursor, options).all();
     }
     return cursor.all();
   }
@@ -58,7 +58,7 @@ export class MemoryStorage<TSchema extends AnyDocument = AnyDocument> extends St
       if (modified.length > 0) {
        modifiedCount += 1;
        documents.push(document);
-        this.documents.add(document);
+        this.index.update(document);
       }
     }
 
@@ -72,7 +72,7 @@ export class MemoryStorage<TSchema extends AnyDocument = AnyDocument> extends St
   async remove(condition: Criteria<TSchema>): Promise<number> {
     const documents = await this.find(condition);
     for (const document of documents) {
-      this.documents.delete(document);
+      this.index.remove(document);
     }
     this.broadcast("remove", documents);
     return documents.length;
@@ -83,6 +83,6 @@ export class MemoryStorage<TSchema extends AnyDocument = AnyDocument> extends St
   }
 
   async flush(): Promise<void> {
-    this.documents.clear();
+    this.index.flush();
   }
 }
@@ -1,29 +0,0 @@
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { MemoryStorage } from "../storage.ts";
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Unit Tests
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
describe("Memory Storage", () => {
|
||||
it("should insert new records", async () => {
|
||||
const storage = new MemoryStorage("test", [["id", { primary: true }]]);
|
||||
|
||||
const documents = [
|
||||
{
|
||||
id: "abc",
|
||||
foo: "bar",
|
||||
},
|
||||
];
|
||||
|
||||
await storage.insert(documents);
|
||||
|
||||
console.log(storage);
|
||||
|
||||
expect(storage.documents).toContain(documents);
|
||||
});
|
||||
});
|
||||
@@ -1,17 +1,19 @@
 import type { Criteria } from "mingo/types";
 
-import type { AnyDocument } from "../types.ts";
-import { PrimaryIndex } from "./primary.ts";
+import type { AnyDocument, StringKeyOf } from "../types.ts";
+import { PrimaryIndex, type PrimaryKey } from "./primary.ts";
 import { SharedIndex } from "./shared.ts";
 import { UniqueIndex } from "./unique.ts";
 
+const EMPTY_SET: ReadonlySet<PrimaryKey> = Object.freeze(new Set<PrimaryKey>());
+
 export class IndexManager<TSchema extends AnyDocument> {
   readonly primary: PrimaryIndex<TSchema>;
 
-  readonly unique = new Map<keyof TSchema, UniqueIndex>();
-  readonly shared = new Map<keyof TSchema, SharedIndex>();
+  readonly unique = new Map<StringKeyOf<TSchema>, UniqueIndex>();
+  readonly shared = new Map<StringKeyOf<TSchema>, SharedIndex>();
 
-  constructor(specs: IndexSpec[]) {
+  constructor(readonly specs: IndexSpec<TSchema>[]) {
     const primary = specs.find((spec) => spec.kind === "primary");
     if (primary === undefined) {
       throw new Error("Primary index is required");
@@ -31,43 +33,165 @@ export class IndexManager<TSchema extends AnyDocument> {
     }
   }
 
+  /**
+   * Atomic insert of the document into the index pools. If any part
+   * of the operation fails all changes are rolled back to their original
+   * states.
+   *
+   * @param document - Document to insert.
+   */
+  insert(document: TSchema) {
+    const pk = document[this.primary.key];
+
+    const insertedUniques: [StringKeyOf<TSchema>, any][] = [];
+    const insertedShared: [StringKeyOf<TSchema>, any][] = [];
+
+    try {
+      for (const [field, index] of this.unique) {
+        index.insert(document[field], pk);
+        insertedUniques.push([field, document[field]]);
+      }
+      for (const [field, index] of this.shared) {
+        index.insert(document[field], pk);
+        insertedShared.push([field, document[field]]);
+      }
+      this.primary.insert(pk, document);
+    } catch (err) {
+      for (const [field, value] of insertedUniques) {
+        this.unique.get(field)?.delete(value);
+      }
+      for (const [field, value] of insertedShared) {
+        this.shared.get(field)?.delete(value, pk);
+      }
+      throw err;
+    }
+  }
getByCondition(condition: Criteria<TSchema>): TSchema[] | undefined {
|
||||
// const pks = new Set<any>();
|
||||
// for (const key in condition) {
|
||||
// if (this.indexes.includes(key)) {
|
||||
// if (key === this.primaryKey) {
|
||||
// pks.add(condition[key]);
|
||||
// } else {
|
||||
// const
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
return [];
|
||||
getByCondition(condition: Criteria<TSchema>): TSchema[] {
|
||||
const indexedKeys = Array.from(
|
||||
new Set([this.primary.key as StringKeyOf<TSchema>, ...this.unique.keys(), ...this.shared.keys()]),
|
||||
);
|
||||
|
||||
const candidatePKs: PrimaryKey[] = [];
|
||||
|
||||
// ### Primary Keys
|
||||
// Collect primary keys for indexed equality conditions
|
||||
|
||||
const pkSets: ReadonlySet<PrimaryKey>[] = [];
|
||||
|
||||
for (const key of indexedKeys) {
|
||||
const value = (condition as any)[key];
|
||||
if (value !== undefined) {
|
||||
// Use index if available
|
||||
const pks = this.getPrimaryKeysByIndex(key, value);
|
||||
pkSets.push(pks);
|
||||
}
|
||||
}
|
||||
|
||||
// ### Intersect
|
||||
// Intersect all sets to find candidates
|
||||
|
||||
if (pkSets.length > 0) {
|
||||
const sortedSets = pkSets.sort((a, b) => a.size - b.size);
|
||||
const intersection = new Set(sortedSets[0]);
|
||||
for (let i = 1; i < sortedSets.length; i++) {
|
||||
for (const pk of intersection) {
|
||||
if (!sortedSets[i].has(pk)) {
|
||||
intersection.delete(pk);
|
||||
}
|
||||
}
|
||||
}
|
||||
candidatePKs.push(...intersection);
|
||||
} else {
|
||||
candidatePKs.push(...this.primary.keys()); // no indexed fields → scan all primary keys
|
||||
}
|
||||
|
||||
// ### Filter
|
||||
// Filter candidates by remaining condition
|
||||
|
||||
const results: TSchema[] = [];
|
||||
for (const pk of candidatePKs) {
|
||||
const doc = this.primary.get(pk);
|
||||
if (doc === undefined) {
|
||||
continue;
|
||||
}
|
||||
let match = true;
|
||||
for (const [field, expected] of Object.entries(condition)) {
|
||||
if ((doc as any)[field] !== expected) {
|
||||
match = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (match) {
|
||||
results.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all primary keys found for given field => value pair.
|
||||
*
|
||||
* @param field - Field to lookup.
|
||||
* @param value - Value to lookup.
|
||||
*/
|
||||
getPrimaryKeysByIndex(field: StringKeyOf<TSchema>, value: any): ReadonlySet<PrimaryKey> {
|
||||
if (field === this.primary.key) {
|
||||
if (this.primary.has(value)) {
|
||||
return new Set([value]);
|
||||
}
|
||||
return EMPTY_SET;
|
||||
}
|
||||
if (this.unique.has(field)) {
|
||||
const pk = this.unique.get(field)?.lookup(value);
|
||||
if (pk === undefined) {
|
||||
return EMPTY_SET;
|
||||
}
|
||||
return new Set([pk]);
|
||||
}
|
||||
return this.shared.get(field)?.lookup(value) ?? EMPTY_SET;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get document by primary key.
|
||||
*
|
||||
* @param pk - Primary key to fetch document for.
|
||||
*/
|
||||
getByPrimary(pk: string): TSchema | undefined {
|
||||
return this.primary.get(pk);
|
||||
}
|
||||
|
||||
getByUnique(field: keyof TSchema, value: any): TSchema | undefined {
|
||||
/**
|
||||
* Get a document found for given field => value pair.
|
||||
*
|
||||
* @param field - Field to lookup.
|
||||
* @param value - Value to lookup.
|
||||
*/
|
||||
getByUnique(field: StringKeyOf<TSchema>, value: any): TSchema | undefined {
|
||||
const pk = this.unique.get(field)?.lookup(value);
|
||||
if (pk !== undefined) {
|
||||
return this.primary.get(pk);
|
||||
}
|
||||
}
|
||||
|
||||
getByIndex(field: keyof TSchema, value: any): TSchema[] {
|
||||
/**
|
||||
* Get all documents found for given field => value pair.
|
||||
*
|
||||
* @note This method may clean up stale index entries during reads.
|
||||
*
|
||||
* @param field - Field to lookup.
|
||||
* @param value - Value to lookup.
|
||||
*/
|
||||
getByIndex(field: StringKeyOf<TSchema>, value: any): TSchema[] {
|
||||
if (field === this.primary.key) {
|
||||
const document = this.getByPrimary(value);
|
||||
if (document === undefined) {
|
||||
return [];
|
||||
}
|
||||
return [document];
|
||||
}
|
||||
|
||||
if (this.unique.has(field)) {
|
||||
const document = this.getByUnique(field, value);
|
||||
if (document === undefined) {
|
||||
@@ -94,23 +218,94 @@ export class IndexManager<TSchema extends AnyDocument> {
|
||||
return documents;
|
||||
}
|
||||
|
||||
remove(pk: string) {
|
||||
const document = this.primary.get(pk);
|
||||
if (document === undefined) {
|
||||
return;
|
||||
/**
|
||||
* Update indexes for given document.
|
||||
*
|
||||
* @note If the document does not exist it will be inserted.
|
||||
*
|
||||
* @param document - Document to update against current index.
|
||||
*/
|
||||
update(document: TSchema) {
|
||||
const pk = document[this.primary.key];
|
||||
const current = this.primary.get(pk);
|
||||
|
||||
if (current === undefined) {
|
||||
return this.insert(document);
|
||||
}
|
||||
|
||||
const revertedUniques: [StringKeyOf<TSchema>, any][] = [];
|
||||
const revertedShared: [StringKeyOf<TSchema>, any][] = [];
|
||||
|
||||
try {
|
||||
for (const [field, index] of this.unique) {
|
||||
index.delete(document[field]);
|
||||
if (current[field] !== document[field]) {
|
||||
index.delete(current[field]);
|
||||
index.insert(document[field], pk);
|
||||
revertedUniques.push([field, current[field]]);
|
||||
}
|
||||
}
|
||||
for (const [field, index] of this.shared) {
|
||||
index.delete(document[field], pk);
|
||||
if (current[field] !== document[field]) {
|
||||
index.delete(current[field], pk);
|
||||
index.insert(document[field], pk);
|
||||
revertedShared.push([field, current[field]]);
|
||||
}
|
||||
this.primary.delete(pk);
|
||||
}
|
||||
this.primary.replace(pk, document);
|
||||
} catch (err) {
|
||||
for (const [field, value] of revertedUniques) {
|
||||
this.unique.get(field)?.insert(value, pk);
|
||||
this.unique.get(field)?.delete(document[field]);
|
||||
}
|
||||
for (const [field, value] of revertedShared) {
|
||||
this.shared.get(field)?.insert(value, pk);
|
||||
this.shared.get(field)?.delete(document[field], pk);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export type IndexSpec = {
|
||||
field: string;
|
||||
/**
|
||||
* Remove all indexes related to given document.
|
||||
*
|
||||
* @param document - Document to remove.
|
||||
*/
|
||||
remove(document: TSchema) {
|
||||
const pk = document[this.primary.key];
|
||||
const current = this.primary.get(pk);
|
||||
if (current === undefined) {
|
||||
return;
|
||||
}
|
||||
for (const [field, index] of this.unique) {
|
||||
index.delete(current[field]);
|
||||
}
|
||||
for (const [field, index] of this.shared) {
|
||||
index.delete(current[field], pk);
|
||||
}
|
||||
this.primary.delete(pk);
|
||||
}
|
||||
|
||||
flush() {
|
||||
this.primary.flush();
|
||||
this.unique.clear();
|
||||
this.shared.clear();
|
||||
for (const spec of this.specs) {
|
||||
switch (spec.kind) {
|
||||
case "unique": {
|
||||
this.unique.set(spec.field, new UniqueIndex());
|
||||
break;
|
||||
}
|
||||
case "shared": {
|
||||
this.shared.set(spec.field, new SharedIndex());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type IndexSpec<TSchema extends AnyDocument> = {
|
||||
field: StringKeyOf<TSchema>;
|
||||
kind: IndexKind;
|
||||
};
|
||||
|
||||
|
||||
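To make the atomic index behaviour concrete, a small usage sketch against the IndexManager API added above (it mirrors the new tests/index-manager.test.ts; the import path and the User shape are illustrative, and the unique-violation error itself is raised by UniqueIndex, which is outside this diff):

import { IndexManager, type IndexSpec } from "../src/index/manager.ts";

type User = { id: string; email: string; group: string };

const specs: IndexSpec<User>[] = [
  { field: "id", kind: "primary" },
  { field: "email", kind: "unique" },
  { field: "group", kind: "shared" },
];

const manager = new IndexManager<User>(specs);
manager.insert({ id: "u1", email: "a@example.com", group: "staff" });

// Inserting a duplicate unique value throws, and the partial insert is rolled
// back, so none of the index pools keep entries for the rejected document.
try {
  manager.insert({ id: "u2", email: "a@example.com", group: "staff" });
} catch {
  console.log(manager.getByIndex("group", "staff").length); // still 1
}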
@@ -7,6 +7,18 @@ export class PrimaryIndex<TSchema extends AnyDocument> {
|
||||
|
||||
constructor(readonly key: string) {}
|
||||
|
||||
get documents() {
|
||||
return Array.from(this.#index.values());
|
||||
}
|
||||
|
||||
keys() {
|
||||
return Array.from(this.#index.keys());
|
||||
}
|
||||
|
||||
has(pk: PrimaryKey): boolean {
|
||||
return this.#index.has(pk);
|
||||
}
|
||||
|
||||
insert(pk: PrimaryKey, document: TSchema) {
|
||||
if (this.#index.has(pk)) {
|
||||
throw new Error(`Duplicate primary key: ${pk}`);
|
||||
@@ -18,7 +30,15 @@ export class PrimaryIndex<TSchema extends AnyDocument> {
|
||||
return this.#index.get(pk);
|
||||
}
|
||||
|
||||
replace(pk: PrimaryKey, document: TSchema) {
|
||||
this.#index.set(pk, document);
|
||||
}
|
||||
|
||||
delete(pk: PrimaryKey) {
|
||||
this.#index.delete(pk);
|
||||
}
|
||||
|
||||
flush() {
|
||||
this.#index.clear();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
 import type { PrimaryKey } from "./primary.ts";
 
+const EMPTY_SET: ReadonlySet<PrimaryKey> = Object.freeze(new Set<PrimaryKey>());
+
 export class SharedIndex {
   readonly #index = new Map<string, Set<PrimaryKey>>();
 
@@ -23,8 +25,8 @@ export class SharedIndex {
   *
   * @param value - Value to lookup a primary key for.
   */
-  lookup(value: any): Set<PrimaryKey> {
-    return this.#index.get(value) ?? new Set();
+  lookup(value: any): ReadonlySet<PrimaryKey> {
+    return this.#index.get(value) ?? EMPTY_SET;
   }
 
   /**
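A short note on the EMPTY_SET change above: a lookup miss now returns one shared, frozen ReadonlySet instead of allocating a fresh Set per call, and the readonly type makes it explicit that callers must not mutate the result. A minimal sketch of the pattern, independent of the SharedIndex internals:

// One frozen empty set shared by every miss; ReadonlySet prevents accidental writes.
const EMPTY_SET: ReadonlySet<string> = Object.freeze(new Set<string>());

function lookup(index: Map<string, Set<string>>, value: string): ReadonlySet<string> {
  return index.get(value) ?? EMPTY_SET;
}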
@@ -13,7 +13,7 @@ export function observe<TCollection extends Collection>(
|
||||
options: QueryOptions | undefined,
|
||||
onChange: (documents: AnyDocument[], changed: AnyDocument[], type: ChangeEvent["type"]) => void,
|
||||
): Subscription {
|
||||
const documents = new Map<string | number, AnyDocument>();
|
||||
const cache = new Map<string, AnyDocument>();
|
||||
|
||||
let debounce: any;
|
||||
|
||||
@@ -21,6 +21,9 @@ export function observe<TCollection extends Collection>(
|
||||
// Find the initial documents and send them to the change listener.
|
||||
|
||||
collection.findMany(condition, options).then(async (documents) => {
|
||||
for (const document of documents) {
|
||||
cache.set(collection.getPrimaryKeyValue(document), document);
|
||||
}
|
||||
onChange(documents, documents, "insert");
|
||||
});
|
||||
|
||||
@@ -37,7 +40,7 @@ export function observe<TCollection extends Collection>(
|
||||
case "insert": {
|
||||
for (const document of data) {
|
||||
if (isMatch(document, condition)) {
|
||||
documents.set(collection.getPrimaryKeyValue(document), document);
|
||||
cache.set(collection.getPrimaryKeyValue(document), document);
|
||||
changed.push(document);
|
||||
}
|
||||
}
|
||||
@@ -46,15 +49,15 @@ export function observe<TCollection extends Collection>(
|
||||
case "update": {
|
||||
for (const document of data) {
|
||||
const id = collection.getPrimaryKeyValue(document);
|
||||
if (documents.has(id)) {
|
||||
if (cache.has(id)) {
|
||||
if (isMatch(document, condition)) {
|
||||
documents.set(id, document);
|
||||
cache.set(id, document);
|
||||
} else {
|
||||
documents.delete(id);
|
||||
cache.delete(id);
|
||||
}
|
||||
changed.push(document);
|
||||
} else if (isMatch(document, condition)) {
|
||||
documents.set(id, document);
|
||||
cache.set(id, document);
|
||||
changed.push(document);
|
||||
}
|
||||
}
|
||||
@@ -63,7 +66,7 @@ export function observe<TCollection extends Collection>(
|
||||
case "remove": {
|
||||
for (const document of data) {
|
||||
if (isMatch(document, condition)) {
|
||||
documents.delete(collection.getPrimaryKeyValue(document));
|
||||
cache.delete(collection.getPrimaryKeyValue(document));
|
||||
changed.push(document);
|
||||
}
|
||||
}
|
||||
@@ -73,7 +76,7 @@ export function observe<TCollection extends Collection>(
|
||||
if (changed.length > 0) {
|
||||
clearTimeout(debounce);
|
||||
debounce = setTimeout(() => {
|
||||
onChange(applyQueryOptions(Array.from(documents.values()), options), changed, type);
|
||||
onChange(applyQueryOptions(Array.from(cache.values()), options), changed, type);
|
||||
}, 0);
|
||||
}
|
||||
}),
|
||||
|
||||
@@ -1,6 +1,9 @@
+import type { AnyDocument } from "@valkyr/db";
 import type { ZodRawShape } from "zod";
 
-export type Registrars<TSchema extends ZodRawShape = ZodRawShape> = {
+import type { IndexSpec } from "./index/manager.ts";
+
+export type Registrars<TSchema extends AnyDocument = ZodRawShape> = {
   /**
    * Name of the collection.
    */
@@ -14,5 +17,5 @@ export type Registrars<TSchema extends ZodRawShape = ZodRawShape> = {
   /**
    * List of custom indexes for the collection.
    */
-  indexes: IndexSpec[];
+  indexes: IndexSpec<TSchema>[];
 };
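For context, a hedged sketch of a registrar entry under the updated Registrars type (the "users" collection, its fields, and the optional flag are illustrative; the shape follows the registrars array used by the new IndexedDB integration test):

import z from "zod";

const usersRegistrar = {
  name: "users",
  schema: {
    id: z.string(),
    email: z.string(),
    group: z.string().optional(),
  },
  indexes: [
    { field: "id", kind: "primary" },
    { field: "email", kind: "unique" },
    { field: "group", kind: "shared" },
  ],
};

// Passed as one entry of the `registrars` option when constructing IndexedDB or MemoryDatabase.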
@@ -4,7 +4,7 @@ import type { Criteria } from "mingo/types";
 import type { Modifier } from "mingo/updater";
 
 import { BroadcastChannel, type StorageBroadcast } from "./broadcast.ts";
-import type { Index } from "./registrars.ts";
+import type { IndexSpec } from "./index/manager.ts";
 import type { AnyDocument } from "./types.ts";
 
 type StorageEvent = "change" | "flush";
@@ -25,10 +25,10 @@ export abstract class Storage<TSchema extends AnyDocument = AnyDocument> {
     /**
      * List of indexes to optimize storage lookups.
      */
-    readonly indexes: Index[],
+    readonly indexes: IndexSpec<TSchema>[],
   ) {
     if (primaryIndexCount(indexes) !== 1) {
-      throw new Error("Storage is missing or has more than 1 defined primaryIndex");
+      throw new Error("missing required primary key assignment");
     }
     this.#channel = new BroadcastChannel(`@valkyr/db:${name}`);
     this.#channel.onmessage = ({ data }: MessageEvent<StorageBroadcast>) => {
@@ -174,7 +174,7 @@ export function addOptions<TSchema extends AnyDocument = AnyDocument>(
   cursor: Cursor<TSchema>,
   options: QueryOptions,
 ): Cursor<TSchema> {
-  if (options.sort) {
+  if (options.sort !== undefined) {
     cursor.sort(options.sort);
   }
   if (options.skip !== undefined) {
@@ -186,10 +186,10 @@ export function addOptions<TSchema extends AnyDocument = AnyDocument>(
   return cursor;
 }
 
-function primaryIndexCount(indexes: Index[]): number {
+function primaryIndexCount<TSchema extends AnyDocument = AnyDocument>(indexes: IndexSpec<TSchema>[]): number {
   let count = 0;
-  for (const [, options] of indexes) {
-    if (options?.primary === true) {
+  for (const { kind } of indexes) {
+    if (kind === "primary") {
       count += 1;
     }
   }
src/types.ts
@@ -1,3 +1,5 @@
+import type { Criteria } from "mingo/types";
+
 /**
  * Represents an unknown document with global support.
  */
@@ -5,7 +7,24 @@ export type AnyDocument = {
   [key: string]: any;
 };
 
+export type StringKeyOf<T> = Extract<keyof T, string>;
+
 /**
  * Simplifies a complex type.
  */
 export type Prettify<T> = { [K in keyof T]: T[K] } & {};
+
+/**
+ * Extended Criteria type that includes MongoDB logical and comparison operators
+ */
+export type QueryCriteria<T> = Criteria<T> & {
+  $and?: QueryCriteria<T>[];
+  $or?: QueryCriteria<T>[];
+  $nor?: QueryCriteria<T>[];
+  $not?: QueryCriteria<T>;
+
+  $exists?: boolean;
+  $type?: string | number;
+
+  [key: string]: any;
+};
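A short example of the operators the new QueryCriteria type admits (the document shape and import path are illustrative; the filter itself is still evaluated by mingo as before):

import type { QueryCriteria } from "./src/types.ts";

type User = { id: string; name: string; age?: number; group: string };

const criteria: QueryCriteria<User> = {
  $or: [{ group: "staff" }, { group: "admin" }],
  age: { $exists: true },
  $nor: [{ name: "root" }],
};

// e.g. await users.findMany(criteria, { sort: { name: 1 }, limit: 25 });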
@@ -1,52 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { IndexedDbCache } from "../src/databases/indexeddb/cache.ts";
|
||||
import type { Options } from "../src/storage/storage.ts";
|
||||
import type { WithId } from "../src/types.ts";
|
||||
|
||||
describe("IndexedDbCache", () => {
|
||||
let cache: IndexedDbCache;
|
||||
|
||||
beforeEach(() => {
|
||||
cache = new IndexedDbCache();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cache.flush();
|
||||
});
|
||||
|
||||
const sampleDocuments: WithId<{ name: string }>[] = [
|
||||
{ id: "doc1", name: "Document 1" },
|
||||
{ id: "doc2", name: "Document 2" },
|
||||
];
|
||||
|
||||
const sampleCriteria = { name: { $eq: "Document 1" } };
|
||||
const sampleOptions: Options = { sort: { name: 1 } };
|
||||
|
||||
it("hash", () => {
|
||||
const hashCode = cache.hash(sampleCriteria, sampleOptions);
|
||||
expect(typeof hashCode).toBe("number");
|
||||
});
|
||||
|
||||
it("set and get", () => {
|
||||
const hashCode = cache.hash(sampleCriteria, sampleOptions);
|
||||
cache.set(hashCode, sampleDocuments);
|
||||
const result = cache.get(hashCode);
|
||||
expect(result).toEqual(sampleDocuments);
|
||||
});
|
||||
|
||||
it("get undefined", () => {
|
||||
const hashCode = cache.hash(sampleCriteria, sampleOptions);
|
||||
const result = cache.get(hashCode);
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("flush", () => {
|
||||
const hashCode = cache.hash(sampleCriteria, sampleOptions);
|
||||
cache.set(hashCode, sampleDocuments);
|
||||
cache.flush();
|
||||
const result = cache.get(hashCode);
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
(File diff suppressed because it is too large.)
@@ -1,15 +0,0 @@
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { hashCodeQuery } from "../src/hash.ts";
|
||||
import type { Options } from "../src/mod.ts";
|
||||
|
||||
describe("hashCodeQuery", () => {
|
||||
const filter = { name: { $eq: "Document 1" } };
|
||||
const options: Options = { sort: { name: 1 } };
|
||||
|
||||
it("return correct hash code", () => {
|
||||
const hashCode = hashCodeQuery(filter, options);
|
||||
expect(typeof hashCode).toBe("number");
|
||||
});
|
||||
});
|
||||
tests/index-manager.test.ts (new file, 307 lines)
@@ -0,0 +1,307 @@
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { IndexManager, type IndexSpec } from "../src/index/manager.ts";
|
||||
|
||||
describe("IndexManager", () => {
|
||||
type User = {
|
||||
id: string;
|
||||
email: string;
|
||||
group: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
const specs: IndexSpec<User>[] = [
|
||||
{ field: "id", kind: "primary" },
|
||||
{ field: "email", kind: "unique" },
|
||||
{ field: "group", kind: "shared" },
|
||||
];
|
||||
|
||||
it("should insert and retrieve documents by primary, unique, and shared indexes", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user1: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
const user2: User = { id: "u2", email: "b@example.com", group: "staff", name: "Bob" };
|
||||
const user3: User = { id: "u3", email: "c@example.com", group: "admin", name: "Carol" };
|
||||
|
||||
// insert
|
||||
manager.insert(user1);
|
||||
manager.insert(user2);
|
||||
manager.insert(user3);
|
||||
|
||||
// primary lookup
|
||||
expect(manager.getByPrimary("u1")).toEqual(user1);
|
||||
expect(manager.getByPrimary("u2")).toEqual(user2);
|
||||
expect(manager.getByPrimary("u3")).toEqual(user3);
|
||||
|
||||
// unique lookup
|
||||
expect(manager.getByUnique("email", "a@example.com")).toEqual(user1);
|
||||
expect(manager.getByUnique("email", "b@example.com")).toEqual(user2);
|
||||
|
||||
// shared lookup
|
||||
const staff = manager.getByIndex("group", "staff");
|
||||
expect(staff).toHaveLength(2);
|
||||
expect(staff).toContainEqual(user1);
|
||||
expect(staff).toContainEqual(user2);
|
||||
|
||||
const admin = manager.getByIndex("group", "admin");
|
||||
expect(admin).toHaveLength(1);
|
||||
expect(admin[0]).toEqual(user3);
|
||||
|
||||
// unknown lookup
|
||||
expect(manager.getByPrimary("unknown")).toBeUndefined();
|
||||
expect(manager.getByUnique("email", "notfound@example.com")).toBeUndefined();
|
||||
expect(manager.getByIndex("group", "none")).toEqual([]);
|
||||
});
|
||||
|
||||
it("should enforce unique constraints", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
manager.insert(user);
|
||||
|
||||
const dupEmail: User = { id: "u2", email: "a@example.com", group: "admin", name: "Bob" };
|
||||
expect(() => manager.insert(dupEmail)).toThrow(/Unique constraint violation/);
|
||||
});
|
||||
|
||||
it("should remove documents and clean up indexes", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
manager.insert(user);
|
||||
|
||||
// sanity
|
||||
expect(manager.getByPrimary("u1")).toEqual(user);
|
||||
expect(manager.getByUnique("email", "a@example.com")).toEqual(user);
|
||||
expect(manager.getByIndex("group", "staff")).toContainEqual(user);
|
||||
|
||||
// remove
|
||||
manager.remove(user);
|
||||
|
||||
expect(manager.getByPrimary("u1")).toBeUndefined();
|
||||
expect(manager.getByUnique("email", "a@example.com")).toBeUndefined();
|
||||
expect(manager.getByIndex("group", "staff")).toEqual([]);
|
||||
});
|
||||
|
||||
it("should update existing documents", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
manager.insert(user);
|
||||
|
||||
// update email and group
|
||||
const updated: User = { ...user, email: "a_new@example.com", group: "admin" };
|
||||
manager.update(updated);
|
||||
|
||||
// old unique index cleared
|
||||
expect(manager.getByUnique("email", "a@example.com")).toBeUndefined();
|
||||
|
||||
// new unique index works
|
||||
expect(manager.getByUnique("email", "a_new@example.com")).toEqual(updated);
|
||||
|
||||
// old shared index cleared
|
||||
expect(manager.getByIndex("group", "staff")).toEqual([]);
|
||||
|
||||
// new shared index works
|
||||
expect(manager.getByIndex("group", "admin")).toContainEqual(updated);
|
||||
|
||||
// primary still points to updated document
|
||||
expect(manager.getByPrimary("u1")).toEqual(updated);
|
||||
});
|
||||
|
||||
it("should perform upsert if primary key does not exist", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
|
||||
// update on non-existent PK acts as insert
|
||||
manager.update(user);
|
||||
|
||||
expect(manager.getByPrimary("u1")).toEqual(user);
|
||||
expect(manager.getByUnique("email", "a@example.com")).toEqual(user);
|
||||
expect(manager.getByIndex("group", "staff")).toContainEqual(user);
|
||||
});
|
||||
|
||||
it("should lazily clean up stale shared index references", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user: User = { id: "u1", email: "a@example.com", group: "staff", name: "Alice" };
|
||||
manager.insert(user);
|
||||
|
||||
// manually delete primary without cleaning shared
|
||||
manager.primary.delete("u1");
|
||||
|
||||
// getByIndex should remove stale reference
|
||||
const result = manager.getByIndex("group", "staff");
|
||||
expect(result).toEqual([]);
|
||||
// after lazy cleanup, lookup should also be empty
|
||||
expect(manager.getPrimaryKeysByIndex("group", "staff")).toEqual(new Set());
|
||||
});
|
||||
|
||||
describe(".getByCondition", () => {
|
||||
type User = {
|
||||
id: string;
|
||||
email: string;
|
||||
group: string;
|
||||
name: string;
|
||||
active: boolean;
|
||||
};
|
||||
|
||||
const specs: IndexSpec<User>[] = [
|
||||
{ field: "id", kind: "primary" },
|
||||
{ field: "email", kind: "unique" },
|
||||
{ field: "group", kind: "shared" },
|
||||
];
|
||||
|
||||
it("should find documents by primary key", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user = { id: "u1", email: "a@example.com", group: "staff", name: "Alice", active: true };
|
||||
manager.insert(user);
|
||||
|
||||
const results = manager.getByCondition({ id: "u1" });
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]).toEqual(user);
|
||||
});
|
||||
|
||||
it("should find documents by unique index", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user = { id: "u1", email: "a@example.com", group: "staff", name: "Alice", active: true };
|
||||
manager.insert(user);
|
||||
|
||||
const results = manager.getByCondition({ email: "a@example.com" });
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]).toEqual(user);
|
||||
});
|
||||
|
||||
it("should find documents by shared index", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user1 = { id: "u1", email: "a@example.com", group: "staff", name: "Alice", active: true };
|
||||
const user2 = { id: "u2", email: "b@example.com", group: "staff", name: "Bob", active: false };
|
||||
const user3 = { id: "u3", email: "c@example.com", group: "admin", name: "Carol", active: true };
|
||||
|
||||
manager.insert(user1);
|
||||
manager.insert(user2);
|
||||
manager.insert(user3);
|
||||
|
||||
const staff = manager.getByCondition({ group: "staff" });
|
||||
expect(staff).toHaveLength(2);
|
||||
expect(staff).toContainEqual(user1);
|
||||
expect(staff).toContainEqual(user2);
|
||||
|
||||
const admin = manager.getByCondition({ group: "admin" });
|
||||
expect(admin).toHaveLength(1);
|
||||
expect(admin[0]).toEqual(user3);
|
||||
});
|
||||
|
||||
it("should handle multiple fields with intersection", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user1 = { id: "u1", email: "a@example.com", group: "staff", name: "Alice", active: true };
|
||||
const user2 = { id: "u2", email: "b@example.com", group: "staff", name: "Bob", active: false };
|
||||
manager.insert(user1);
|
||||
manager.insert(user2);
|
||||
|
||||
// Lookup by shared + non-indexed field
|
||||
const results = manager.getByCondition({ group: "staff", active: true });
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]).toEqual(user1);
|
||||
});
|
||||
|
||||
it("should return empty array if no match", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const user = { id: "u1", email: "a@example.com", group: "staff", name: "Alice", active: true };
|
||||
manager.insert(user);
|
||||
|
||||
const results = manager.getByCondition({ group: "admin" });
|
||||
expect(results).toEqual([]);
|
||||
|
||||
const results2 = manager.getByCondition({ email: "nonexistent@example.com" });
|
||||
expect(results2).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("IndexManager Performance", () => {
|
||||
type User = {
|
||||
id: string;
|
||||
email: string;
|
||||
group: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
const NUM_RECORDS = 100_000;
|
||||
|
||||
const specs: IndexSpec<User>[] = [
|
||||
{ field: "id", kind: "primary" },
|
||||
{ field: "email", kind: "unique" },
|
||||
{ field: "group", kind: "shared" },
|
||||
];
|
||||
|
||||
it("should insert and query thousands of records efficiently", () => {
|
||||
const manager = new IndexManager<User>(specs);
|
||||
|
||||
const groups = ["staff", "admin", "guest", "manager"];
|
||||
|
||||
console.time("Insert 100k records");
|
||||
for (let i = 0; i < NUM_RECORDS; i++) {
|
||||
const user: User = {
|
||||
id: `user_${i}`,
|
||||
email: `user_${i}@example.com`,
|
||||
group: groups[i % groups.length],
|
||||
name: `User ${i}`,
|
||||
};
|
||||
manager.insert(user);
|
||||
}
|
||||
console.timeEnd("Insert 100k records");
|
||||
|
||||
// Check total number of records
|
||||
expect(manager.getByPrimary("user_0")?.name).toEqual("User 0");
|
||||
expect(manager.getByPrimary(`user_${NUM_RECORDS - 1}`)?.name).toEqual(`User ${NUM_RECORDS - 1}`);
|
||||
|
||||
// Unique lookup
|
||||
console.time("Unique lookup 10k");
|
||||
for (let i = 0; i < 10_000; i++) {
|
||||
const user = manager.getByUnique("email", `user_${i}@example.com`);
|
||||
expect(user?.id).toEqual(`user_${i}`);
|
||||
}
|
||||
console.timeEnd("Unique lookup 10k");
|
||||
|
||||
// Shared lookup
|
||||
console.time("Shared lookup");
|
||||
for (const group of groups) {
|
||||
const users = manager.getByIndex("group", group);
|
||||
expect(users.length).toBeGreaterThan(0);
|
||||
}
|
||||
console.timeEnd("Shared lookup");
|
||||
|
||||
// Update some users
|
||||
console.time("Update 10k records");
|
||||
for (let i = 0; i < 10_000; i++) {
|
||||
const user = manager.getByPrimary(`user_${i}`);
|
||||
if (!user) {
|
||||
continue;
|
||||
}
|
||||
const updated = { ...user, group: groups[(i + 1) % groups.length] };
|
||||
manager.update(updated);
|
||||
}
|
||||
console.timeEnd("Update 10k records");
|
||||
|
||||
// Remove some users
|
||||
console.time("Remove 10k records");
|
||||
for (let i = 0; i < 10_000; i++) {
|
||||
const user = manager.getByPrimary(`user_${i}`);
|
||||
if (user) {
|
||||
manager.remove(user);
|
||||
}
|
||||
}
|
||||
console.timeEnd("Remove 10k records");
|
||||
|
||||
// Spot check
|
||||
expect(manager.getByPrimary("user_0")).toBeUndefined();
|
||||
expect(manager.getByPrimary(`user_${10_001}`)).not.toBeUndefined();
|
||||
});
|
||||
});
|
||||
tests/indexeddb-storage.test.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
import { afterAll, afterEach, beforeAll, describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import "fake-indexeddb/auto";
|
||||
|
||||
import z from "zod";
|
||||
|
||||
import { IndexedDB } from "../src/databases/indexeddb/database.ts";
|
||||
import type { DBLogger } from "../src/logger.ts";
|
||||
|
||||
const log: DBLogger = () => {};
|
||||
|
||||
describe("IndexedDB Storage Integration", { sanitizeOps: false, sanitizeResources: false }, () => {
|
||||
let db: IndexedDB<{ name: string; registrars: any[]; log?: DBLogger }>;
|
||||
|
||||
let collection: any;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = new IndexedDB({
|
||||
name: "test-db",
|
||||
registrars: [
|
||||
{
|
||||
name: "users",
|
||||
schema: {
|
||||
id: z.string(),
|
||||
name: z.string().optional(),
|
||||
age: z.number().optional(),
|
||||
},
|
||||
indexes: [
|
||||
{ field: "id", kind: "primary" },
|
||||
{ field: "name", kind: "unique" },
|
||||
],
|
||||
},
|
||||
],
|
||||
log,
|
||||
});
|
||||
|
||||
collection = db.collection("users");
|
||||
|
||||
await collection.storage.resolve();
|
||||
await collection.flush();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.flush();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await db.close();
|
||||
});
|
||||
|
||||
it("should insert and find documents", async () => {
|
||||
await collection.storage.insert([
|
||||
{ id: "1", name: "Alice", age: 30 },
|
||||
{ id: "2", name: "Bob", age: 25 },
|
||||
]);
|
||||
|
||||
const all = await collection.storage.find({});
|
||||
expect(all).toHaveLength(2);
|
||||
|
||||
const alice = await collection.storage.find({ name: "Alice" });
|
||||
expect(alice).toHaveLength(1);
|
||||
expect(alice[0].age).toBe(30);
|
||||
});
|
||||
|
||||
it("should get documents by index", async () => {
|
||||
await collection.storage.insert([{ id: "1", name: "Alice" }]);
|
||||
const byIndex = await collection.storage.getByIndex("id", "1");
|
||||
expect(byIndex).toHaveLength(1);
|
||||
expect(byIndex[0].name).toBe("Alice");
|
||||
});
|
||||
|
||||
it("should update documents", async () => {
|
||||
await collection.storage.insert([{ id: "1", name: "Alice", age: 30 }]);
|
||||
|
||||
const result = await collection.storage.update({ id: "1" }, { $set: { age: 31 } });
|
||||
expect(result.matchedCount).toBe(1);
|
||||
expect(result.modifiedCount).toBe(1);
|
||||
|
||||
const updated = await collection.storage.find({ id: "1" });
|
||||
expect(updated[0].age).toBe(31);
|
||||
});
|
||||
|
||||
it("should remove documents", async () => {
|
||||
await collection.storage.insert([
|
||||
{ id: "1", name: "Alice" },
|
||||
{ id: "2", name: "Bob" },
|
||||
]);
|
||||
|
||||
const removedCount = await collection.storage.remove({ name: "Bob" });
|
||||
expect(removedCount).toBe(1);
|
||||
|
||||
const remaining = await collection.storage.find({});
|
||||
expect(remaining).toHaveLength(1);
|
||||
expect(remaining[0].name).toBe("Alice");
|
||||
});
|
||||
|
||||
it("should count documents", async () => {
|
||||
await collection.storage.insert([
|
||||
{ id: "1", age: 30 },
|
||||
{ id: "2", age: 25 },
|
||||
{ id: "3", age: 30 },
|
||||
]);
|
||||
|
||||
const count = await collection.storage.count({ age: 30 });
|
||||
expect(count).toBe(2);
|
||||
});
|
||||
|
||||
it("should flush the collection", async () => {
|
||||
await collection.storage.insert([{ id: "1", name: "Alice" }]);
|
||||
await collection.flush();
|
||||
|
||||
const all = await collection.storage.find({});
|
||||
expect(all).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -1,38 +0,0 @@
|
||||
import "fake-indexeddb/auto";
|
||||
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { Collection } from "../src/collection.ts";
|
||||
import { MemoryStorage } from "../src/databases/memory/storage.ts";
|
||||
import { DuplicateDocumentError } from "../src/storage/errors.ts";
|
||||
import { getUsers } from "./users.mock.ts";
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Unit Tests
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
describe("Storage Insert", () => {
|
||||
it("should successfully insert a new document", async () => {
|
||||
const collection = new Collection("users", new MemoryStorage("users"));
|
||||
const users = getUsers();
|
||||
await collection.insertMany(users);
|
||||
expect(await collection.storage.findById(users[0].id)).toEqual(users[0]);
|
||||
expect(await collection.storage.findById(users[1].id)).toEqual(users[1]);
|
||||
collection.storage.destroy();
|
||||
});
|
||||
|
||||
it("should throw an error if the document already exists", async () => {
|
||||
const collection = new Collection("users", new MemoryStorage("users"));
|
||||
const users = getUsers();
|
||||
try {
|
||||
await collection.insertOne(users[0]);
|
||||
} catch (err) {
|
||||
expect(err instanceof DuplicateDocumentError).toEqual(true);
|
||||
expect(err).toEqual(new DuplicateDocumentError(users[0], collection.storage));
|
||||
}
|
||||
collection.storage.destroy();
|
||||
});
|
||||
});
|
||||
tests/memory-storage.test.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { MemoryStorage } from "../src/databases/memory/storage.ts";
|
||||
|
||||
interface TestDoc {
|
||||
id: string;
|
||||
name?: string;
|
||||
age?: number;
|
||||
tags?: string[];
|
||||
}
|
||||
|
||||
describe("MemoryStorage", () => {
|
||||
it("should insert new records", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
const documents: TestDoc[] = [{ id: "abc", name: "Alice", age: 30 }];
|
||||
|
||||
await storage.insert(documents);
|
||||
|
||||
expect(storage.documents).toHaveLength(1);
|
||||
expect(storage.documents[0]).toEqual(documents[0]);
|
||||
});
|
||||
|
||||
it("should retrieve records by index", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
await storage.insert([
|
||||
{ id: "abc", name: "Alice" },
|
||||
{ id: "def", name: "Bob" },
|
||||
]);
|
||||
|
||||
const result = await storage.getByIndex("id", "abc");
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Alice");
|
||||
});
|
||||
|
||||
it("should find documents by criteria", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
await storage.insert([
|
||||
{ id: "1", name: "Alice", age: 30 },
|
||||
{ id: "2", name: "Bob", age: 25 },
|
||||
{ id: "3", name: "Charlie", age: 30 },
|
||||
]);
|
||||
|
||||
const results = await storage.find({ age: 30 });
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results.map((r) => r.name).sort()).toEqual(["Alice", "Charlie"]);
|
||||
});
|
||||
|
||||
it("should update documents matching a condition", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
await storage.insert([{ id: "1", name: "Alice", age: 30 }]);
|
||||
|
||||
const updateResult = await storage.update({ id: "1" }, { $set: { age: 31 } });
|
||||
expect(updateResult.matchedCount).toBe(1);
|
||||
expect(updateResult.modifiedCount).toBe(1);
|
||||
|
||||
const updated = await storage.find({ id: "1" });
|
||||
expect(updated[0].age).toBe(31);
|
||||
});
|
||||
|
||||
it("should remove documents by condition", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
await storage.insert([
|
||||
{ id: "1", name: "Alice" },
|
||||
{ id: "2", name: "Bob" },
|
||||
]);
|
||||
|
||||
const removedCount = await storage.remove({ name: "Bob" });
|
||||
expect(removedCount).toBe(1);
|
||||
|
||||
const remaining = await storage.find({});
|
||||
expect(remaining).toHaveLength(1);
|
||||
expect(remaining[0].name).toBe("Alice");
|
||||
});
|
||||
|
||||
it("should count documents matching a condition", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
await storage.insert([
|
||||
{ id: "1", name: "Alice", age: 30 },
|
||||
{ id: "2", name: "Bob", age: 25 },
|
||||
{ id: "3", name: "Charlie", age: 30 },
|
||||
]);
|
||||
|
||||
const count = await storage.count({ age: 30 });
|
||||
expect(count).toBe(2);
|
||||
});
|
||||
|
||||
it("should return itself from resolve", async () => {
|
||||
const storage = new MemoryStorage<TestDoc>("test", [{ field: "id", kind: "primary" }]);
|
||||
const resolved = await storage.resolve();
|
||||
expect(resolved).toBe(storage);
|
||||
});
|
||||
});
|
||||
@@ -1,23 +0,0 @@
|
||||
import { describe, it } from "@std/testing/bdd";
|
||||
import { expect } from "expect";
|
||||
|
||||
import { Collection } from "../src/collection.ts";
|
||||
import { MemoryStorage } from "../src/databases/memory/storage.ts";
|
||||
import { RemoveResult } from "../src/storage/operators/remove.ts";
|
||||
import { getUsers } from "./users.mock.ts";
|
||||
|
||||
/*
|
||||
|--------------------------------------------------------------------------------
|
||||
| Unit Tests
|
||||
|--------------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
describe("Storage Remove", () => {
|
||||
it("should successfully delete document", async () => {
|
||||
const collection = new Collection("users", new MemoryStorage("users"));
|
||||
const users = getUsers();
|
||||
await collection.insertMany(users);
|
||||
expect(await collection.remove({ id: "user-1" })).toEqual(new RemoveResult(1));
|
||||
collection.storage.destroy();
|
||||
});
|
||||
});
|
||||
tests/update.test.ts (1446 lines; diff suppressed because it is too large)
@@ -1,43 +0,0 @@
|
||||
const users: UserDocument[] = [
|
||||
{
|
||||
id: "user-1",
|
||||
name: "John Doe",
|
||||
email: "john.doe@test.none",
|
||||
friends: [
|
||||
{
|
||||
id: "user-2",
|
||||
alias: "Jane",
|
||||
},
|
||||
],
|
||||
interests: ["movies", "tv", "sports"],
|
||||
},
|
||||
{
|
||||
id: "user-2",
|
||||
name: "Jane Doe",
|
||||
email: "jane.doe@test.none",
|
||||
friends: [
|
||||
{
|
||||
id: "user-1",
|
||||
alias: "John",
|
||||
},
|
||||
],
|
||||
interests: ["movies", "fitness", "dance"],
|
||||
},
|
||||
];
|
||||
|
||||
export function getUsers(): UserDocument[] {
|
||||
return JSON.parse(JSON.stringify(users));
|
||||
}
|
||||
|
||||
export type UserDocument = {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
friends: Friend[];
|
||||
interests: string[];
|
||||
};
|
||||
|
||||
type Friend = {
|
||||
id: string;
|
||||
alias: string;
|
||||
};
|
||||