diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 82bf8bd..b27135a 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - id-token: write # The OIDC ID token is used for authentication with JSR. + id-token: write # The OIDC ID token is used for authentication with JSR. steps: - uses: actions/checkout@v4 - uses: denoland/setup-deno@v1 diff --git a/README.md b/README.md index da28b88..0e9802e 100644 --- a/README.md +++ b/README.md @@ -131,37 +131,37 @@ type. Using the standard model strategy: ```ts -import { model } from "jsr:@olli/kvdex" +import { model } from "jsr:@olli/kvdex"; type User = { - username: string - age: number - activities: string[] + username: string; + age: number; + activities: string[]; address?: { - country: string - city: string - street: string - houseNumber: number | null - } -} + country: string; + city: string; + street: string; + houseNumber: number | null; + }; +}; // Normal model (equal input and output) -const UserModel = model() +const UserModel = model(); // Asymmetric model (mapped output) const UserModel = model((user: User) => ({ upperCaseUsername: user.username.toUpperCase(), ageInDecades: user.age / 10, createdAt: new Date(), -})) +})); ``` Using Zod instead: ```ts -import { z } from "npm:zod" +import { z } from "npm:zod"; -type User = z.infer +type User = z.infer; const UserModel = z.object({ username: z.string(), @@ -173,7 +173,7 @@ const UserModel = z.object({ street: z.string(), houseNumber: z.number().nullable(), }).optional(), -}) +}); ``` ## Database @@ -227,29 +227,29 @@ added, which can be useful to create derived ids. The default id generator uses Id created from the data being added: ```ts -import { collection, kvdex, model } from "jsr:@olli/kvdex" +import { collection, kvdex, model } from "jsr:@olli/kvdex"; -const kv = await Deno.openKv() +const kv = await Deno.openKv(); const db = kvdex(kv, { users: collection(model(), { idGenerator: (user) => user.username, }), -}) +}); ``` Using randomely generated uuids: ```ts -import { collection, kvdex, model } from "jsr:@olli/kvdex" +import { collection, kvdex, model } from "jsr:@olli/kvdex"; -const kv = await Deno.openKv() +const kv = await Deno.openKv(); const db = kvdex(kv, { users: collection(model(), { idGenerator: () => crypto.randomUUID(), }), -}) +}); ``` ### `indices` @@ -260,9 +260,9 @@ querying data based on index values. **NOTE:** Index values are always serialized. ```ts -import { collection, kvdex, model } from "jsr:@olli/kvdex" +import { collection, kvdex, model } from "jsr:@olli/kvdex"; -const kv = await Deno.openKv() +const kv = await Deno.openKv(); const db = kvdex(kv, { users: collection(model(), { @@ -271,7 +271,7 @@ const db = kvdex(kv, { age: "secondary", // non-unique }, }), -}) +}); ``` ### `serialize` @@ -321,15 +321,15 @@ const db = kvdex(kv, { Set to `true` to enable version history. Default is `false`. ```ts -import { collection, kvdex, model } from "jsr:@olli/kvdex" +import { collection, kvdex, model } from "jsr:@olli/kvdex"; -const kv = await Deno.openKv() +const kv = await Deno.openKv(); const db = kvdex(kv, { users: collection(model(), { history: true, }), -}) +}); ``` ## Collection Methods @@ -341,13 +341,13 @@ adhere to the type KvId. This method takes an optional options argument that can be used to set the consistency mode. 
```ts -const userDoc1 = await db.users.find(123) +const userDoc1 = await db.users.find(123); -const userDoc2 = await db.users.find(123n) +const userDoc2 = await db.users.find(123n); const userDoc3 = await db.users.find("oliver", { consistency: "eventual", // "strong" by default -}) +}); ``` ### findByPrimaryIndex() @@ -356,7 +356,7 @@ Find a document by a primary index. ```ts // Finds a user document with the username = "oliver" -const userByUsername = await db.users.findByPrimaryIndex("username", "oliver") +const userByUsername = await db.users.findByPrimaryIndex("username", "oliver"); ``` ### findBySecondaryIndex() @@ -367,12 +367,12 @@ options argument that can be used for filtering of documents, and pagination. ```ts // Returns all users with age = 24 -const { result } = await db.users.findBySecondaryIndex("age", 24) +const { result } = await db.users.findBySecondaryIndex("age", 24); // Returns all users with age = 24 AND username that starts with "o" const { result } = await db.users.findBySecondaryIndex("age", 24, { filter: (doc) => doc.value.username.startsWith("o"), -}) +}); ``` ### findMany() @@ -382,11 +382,11 @@ ids must adhere to the type KvId. This method takes an optional options argument that can be used to set the consistency mode. ```ts -const userDocs1 = await db.users.findMany(["abc", 123, 123n]) +const userDocs1 = await db.users.findMany(["abc", 123, 123n]); const userDocs2 = await db.users.findMany(["abc", 123, 123n], { consistency: "eventual", // "strong" by default -}) +}); ``` ### findHistory() @@ -396,13 +396,13 @@ timestamp, type of either "write" or "delete", and a copy of the document value if the type is "write". ```ts -const { result } = await db.users.findHistory("user_id") +const { result } = await db.users.findHistory("user_id"); ``` ```ts const { result } = await db.users.findHistory("user_id", { filter: (entry) => entry.type === "write", -}) +}); ``` ### findUndelivered() @@ -412,11 +412,11 @@ in the collection queue. This method takes an optional options argument that can be used to set the consistency mode. ```ts -const doc1 = await db.users.findUndelivered("undelivered_id") +const doc1 = await db.users.findUndelivered("undelivered_id"); const doc2 = await db.users.findUndelivered("undelivered_id", { consistency: "eventual", // "strong" by default -}) +}); ``` ### add() @@ -436,7 +436,7 @@ const result = await db.users.add({ street: "Sesame", houseNumber: null, }, -}) +}); ``` ### addMany() @@ -469,13 +469,13 @@ versionstamp and ok flag. ```ts // Add a new document if the id is not already in use -const result1 = await db.numbers.set("id", 1024) +const result1 = await db.numbers.set("id", 1024); // Overwrite any existing document with the same id -const result2 = await db.numbers.set("id", 2048, { overwrite: true }) +const result2 = await db.numbers.set("id", 2048, { overwrite: true }); if (result1.ok) { - console.log(result.id) // id + console.log(result.id); // id } ``` @@ -488,14 +488,14 @@ the `merge-shallow` strategy is also supported. 
```ts // Updates the document with a new value -const result = await db.numbers.update("num1", 42) +const result = await db.numbers.update("num1", 42); // Partial update using merge, only updates the age field const result = await db.users.update( "oliver", { age: 30 }, { strategy: "merge" }, -) +); ``` ### updateByPrimaryIndex() @@ -508,7 +508,7 @@ const result = await db.users.updateByPrimaryIndex( "username", "oliver", { age: 56 }, -) +); // Updates a user document using shallow merge const result = await db.users.updateByPrimaryIndex( @@ -516,7 +516,7 @@ const result = await db.users.updateByPrimaryIndex( "anders", { age: 89 }, { strategy: "merge-shallow" }, -) +); ``` ### updateBySecondaryIndex() @@ -527,7 +527,9 @@ options are given, all documents by the given index value will we updated. ```ts // Updates all user documents with age = 24 and sets age = 67 -const { result } = await db.users.updateBySecondaryIndex("age", 24, { age: 67 }) +const { result } = await db.users.updateBySecondaryIndex("age", 24, { + age: 67, +}); // Updates all users where age = 24 and username starts with "o", using shallow merge const { result } = await db.users.updateBySecondaryIndex( @@ -538,7 +540,7 @@ const { result } = await db.users.updateBySecondaryIndex( filter: (doc) => doc.value.username.startsWith("o"), strategy: "merge-shallow", }, -) +); ``` ### updateMany() @@ -550,16 +552,16 @@ documents in the collection. ```ts // Updates all user documents and sets age = 67 -const { result } = await db.users.updateMany({ age: 67 }) +const { result } = await db.users.updateMany({ age: 67 }); // Updates all users where age > 20, using shallow merge const { result } = await db.users.updateMany({ age: 67 }, { filter: (doc) => doc.value.age > 20, strategy: "merge-shallow", -}) +}); // Only updates first user document and fails the rest when username is a primary index -const { result } = await db.users.updateMany({ username: "oliver" }) +const { result } = await db.users.updateMany({ username: "oliver" }); ``` ### updateManyBySecondaryOrder() @@ -569,7 +571,7 @@ order. ```ts // Updates the first 10 users ordered by age and sets username = "anon" -await db.users.updateManyBySecondaryOrder("age", { username: "anon" }) +await db.users.updateManyBySecondaryOrder("age", { username: "anon" }); ``` ### updateOne() @@ -580,7 +582,7 @@ same `options` argument as `updateMany()`. If no options are given, ```ts // Updates the first user document and sets age = 67 -const result = await db.users.updateOne({ age: 67 }) +const result = await db.users.updateOne({ age: 67 }); ``` ```ts @@ -588,7 +590,7 @@ const result = await db.users.updateOne({ age: 67 }) const result = await db.users.updateOne({ age: 67 }, { filter: (doc) => doc.value.age > 20, strategy: "merge-shallow", -}) +}); ``` ### updateOneBySecondaryIndex() @@ -600,7 +602,7 @@ collection by the given index value. ```ts // Updates the first user document where age = 20 and sets age = 67 -const result = await db.users.updateOneBySecondaryIndex("age", 20, { age: 67 }) +const result = await db.users.updateOneBySecondaryIndex("age", 20, { age: 67 }); ``` ```ts @@ -613,7 +615,7 @@ const result = await db.users.updateOneBySecondaryIndex( filter: (doc) => doc.value.username.startsWith("a"), strategy: "merge-shallow", }, -) +); ``` ### updateOneBySecondaryOrder() @@ -625,7 +627,7 @@ order. 
// Updates the first user ordered by age and sets username = "anon" const result = await db.users.updateOneBySecondaryOrder("age", { username: "anon", -}) +}); ``` ### upsert() @@ -680,9 +682,9 @@ const result = await db.users.upsertByPrimaryIndex({ Delete one or more documents with the given ids from the KV store. ```ts -await db.users.delete("f897e3cf-bd6d-44ac-8c36-d7ab97a82d77") +await db.users.delete("f897e3cf-bd6d-44ac-8c36-d7ab97a82d77"); -await db.users.delete("user1", "user2", "user3") +await db.users.delete("user1", "user2", "user3"); ``` ### deleteByPrimaryIndex() @@ -691,7 +693,7 @@ Delete a document by a primary index. ```ts // Deletes user with username = "oliver" -await db.users.deleteByPrimaryIndex("username", "oliver") +await db.users.deleteByPrimaryIndex("username", "oliver"); ``` ### deleteBySecondaryIndex() @@ -701,12 +703,12 @@ argument that can be used for filtering of documents, and pagination. ```ts // Deletes all users with age = 24 -await db.users.deleteBySecondaryIndex("age", 24) +await db.users.deleteBySecondaryIndex("age", 24); // Deletes all users with age = 24 AND username that starts with "o" await db.users.deleteBySecondaryIndex("age", 24, { filter: (doc) => doc.value.username.startsWith("o"), -}) +}); ``` ### deleteMany() @@ -718,23 +720,23 @@ documents in the collection. ```ts // Deletes all user documents -await db.users.deleteMany() +await db.users.deleteMany(); // Deletes all user documents where the user's age is above 20 await db.users.deleteMany({ filter: (doc) => doc.value.age > 20, -}) +}); // Deletes the first 10 user documents in the KV store await db.users.deleteMany({ limit: 10, -}) +}); // Deletes the last 10 user documents in the KV store await db.users.deleteMany({ limit: 10, reverse: true, -}) +}); ``` ### deleteManyBySecondaryOrder() @@ -746,7 +748,7 @@ deleted. ```ts // Deletes the first 10 users ordered by age -await db.users.deleteManyBySecondaryOrder("age", { limit: 10 }) +await db.users.deleteManyBySecondaryOrder("age", { limit: 10 }); ``` ### deleteHistory() @@ -754,7 +756,7 @@ await db.users.deleteManyBySecondaryOrder("age", { limit: 10 }) Delete the version history of a document by id. ```ts -await db.users.deleteHistory("user_id") +await db.users.deleteHistory("user_id"); ``` ### deleteUndelivered() @@ -762,7 +764,7 @@ await db.users.deleteHistory("user_id") Delete an undelivered document entry by id from the collection queue. ```ts -await db.users.deleteUndelivered("id") +await db.users.deleteUndelivered("id"); ``` ### getMany() @@ -774,23 +776,23 @@ the collection. ```ts // Retrieves all user documents -const { result } = await db.users.getMany() +const { result } = await db.users.getMany(); // Retrieves all user documents where the user's age is above or equal to 18 const { result } = await db.users.getMany({ filter: (doc) => doc.value.age >= 18, -}) +}); // Retrieves the first 10 user documents in the KV store const { result } = await db.users.getMany({ limit: 10, -}) +}); // Retrieves the last 10 user documents in the KV store const { result } = await db.users.getMany({ limit: 10, reverse: true, -}) +}); ``` ### getManyBySecondaryOrder() @@ -801,12 +803,12 @@ are retrieved. 
```ts // Get all users ordered by age -const { result } = await db.users.getManyBySecondaryOrder("age") +const { result } = await db.users.getManyBySecondaryOrder("age"); // Only get users with username that starts with "a", ordered by age const { result } = await db.users.getManyBySecondaryOrder("age", { filter: (doc) => doc.value.username.startsWith("a"), -}) +}); ``` ### getOne() @@ -817,12 +819,12 @@ retrieve the first document in the collection. ```ts // Retrieves the first user document -const user = await db.users.getOne() +const user = await db.users.getOne(); // Retrieves the first user where the user's age is above or equal to 18 const user = await db.users.getOne({ filter: (doc) => doc.value.age > 18, -}) +}); ``` ### getOneBySecondaryIndex() @@ -834,12 +836,12 @@ collection by the given index value. ```ts // Retrieves the first user document where age = 20 -const user = await db.users.getOneBySecondaryIndex("age", 20) +const user = await db.users.getOneBySecondaryIndex("age", 20); // Retrieves the first user where age = 20 and username starts with "a" const user = await db.users.getOneBySecondaryIndex("age", 20, { filter: (doc) => doc.value.username.startsWith("a"), -}) +}); ``` ### getOneBySecondaryOrder() @@ -850,7 +852,7 @@ collection by the given order is retrieved. ```ts // Get the first user ordered by age -const user = await db.users.getOneBySecondaryOrder("age") +const user = await db.users.getOneBySecondaryOrder("age"); ``` ### forEach() @@ -862,23 +864,23 @@ all documents in the collection. ```ts // Log the username of every user document -await db.users.forEach((doc) => console.log(doc.value.username)) +await db.users.forEach((doc) => console.log(doc.value.username)); // Log the username of every user that has "swimming" as an activity await db.users.forEach((doc) => console.log(doc.value.username), { filter: (doc) => doc.value.activities.includes("swimming"), -}) +}); // Log the usernames of the first 10 user documents in the KV store await db.users.forEach((doc) => console.log(doc.value.username), { limit: 10, -}) +}); // Log the usernames of the last 10 user documents in the KV store await db.users.forEach((doc) => console.log(doc.value.username), { limit: 10, reverse: true, -}) +}); ``` ### forEachBySecondaryIndex() @@ -894,7 +896,7 @@ await db.users.forEachBySecondaryIndex( "age", 20, (doc) => console.log(doc.value.username), -) +); ``` ### forEachBySecondaryOrder() @@ -908,7 +910,7 @@ function is executed for all documents. await db.users.forEachBySecondaryOrder( "age", (doc) => console.log(doc.value.username), -) +); ``` ### map() @@ -920,23 +922,23 @@ function will be executed for all documents in the collection. 
```ts // Get a list of all the ids of the user documents -const { result } = await db.users.map((doc) => doc.id) +const { result } = await db.users.map((doc) => doc.id); // Get a list of all usernames of users with age > 20 const { result } = await db.users.map((doc) => doc.value.username, { filter: (doc) => doc.value.age > 20, -}) +}); // Get a list of the usernames of the first 10 users in the KV store const { result } = await db.users.forEach((doc) => doc.value.username, { limit: 10, -}) +}); // Get a list of the usernames of the last 10 users in the KV store const { result } = await db.users.forEach((doc) => doc.value.username, { limit: 10, reverse: true, -}) +}); ``` ### mapBySecondaryIndex() @@ -952,7 +954,7 @@ const { result } = await db.users.mapBySecondaryIndex( "age", 20, (doc) => doc.value.username, -) +); ``` ### mapBySecondaryOrder() @@ -967,7 +969,7 @@ are returned as a list. const { result } = await db.users.mapBySecondaryOrder( "age", (doc) => doc.value.username, -) +); ``` ### count() @@ -978,12 +980,12 @@ it will count all documents in the collection. ```ts // Returns the total number of user documents in the KV store -const count = await db.users.count() +const count = await db.users.count(); // Returns the number of users with age > 20 const count = await db.users.count({ filter: (doc) => doc.value.age > 20, -}) +}); ``` ### countBySecondaryIndex() @@ -994,7 +996,7 @@ options are given, it will count all documents matching the index. ```ts // Counts all users where age = 20 -const count = await db.users.countBySecondaryIndex("age", 20) +const count = await db.users.countBySecondaryIndex("age", 20); ``` ### countBySecondaryOrder() @@ -1006,7 +1008,7 @@ Counts the number of documents in the collection by a secondary order. const count = await db.users.countBySecondaryOrder("age", { limit: 10, filter: (doc) => doc.value.age < 18, -}) +}); ``` ### enqueue() @@ -1018,13 +1020,13 @@ argument that can be used to set a delivery delay and topic. ```ts // Immediate delivery -await db.users.enqueue("some data") +await db.users.enqueue("some data"); // Delay of 2 seconds before delivery await db.users.enqueue("cake", { delay: 2_000, topic: "food", -}) +}); ``` ### listenQueue() @@ -1036,19 +1038,19 @@ well as optional options that can be used to set the topic. ```ts // Prints the data to console when recevied -db.users.listenQueue((data) => console.log(data)) +db.users.listenQueue((data) => console.log(data)); // Sends post request when data is received db.users.listenQueue(async (data) => { - const dataBody = JSON.stringify(data) + const dataBody = JSON.stringify(data); const res = await fetch("...", { method: "POST", body: data, - }) + }); - console.log("POSTED:", dataBody, res.ok) -}, { topic: "posts" }) + console.log("POSTED:", dataBody, res.ok); +}, { topic: "posts" }); ``` ### watch() @@ -1057,13 +1059,13 @@ Listen for live changes to a single document by id. ```ts // Updates the document value every second -setInterval(() => db.numbers.set("id", Math.random()), 1_000) +setInterval(() => db.numbers.set("id", Math.random()), 1_000); // Listen for any updates to the document value db.numbers.watch("id", (doc) => { // Document will be null if the latest update was a delete operation - console.log(doc?.value) -}) + console.log(doc?.value); +}); ``` Watchers can also be stopped. @@ -1071,10 +1073,10 @@ Watchers can also be stopped. ```ts const { promise, cancel } = db.numbers.watch("id", (doc) => { // ... 
-}) +}); -await cancel() -await promise +await cancel(); +await promise; ``` ### watchMany() @@ -1083,17 +1085,17 @@ Listen for live changes to an array of specified documents by id. ```ts // Delayed setting of document values -setTimeout(() => db.numbers.set("id1", 10), 1_000) -setTimeout(() => db.numbers.set("id2", 20), 2_000) -setTimeout(() => db.numbers.set("id3", 30), 3_000) +setTimeout(() => db.numbers.set("id1", 10), 1_000); +setTimeout(() => db.numbers.set("id2", 20), 2_000); +setTimeout(() => db.numbers.set("id3", 30), 3_000); // Listen for any updates to the document values db.numbers.watchMany(["id1", "id2", "id3"], (docs) => { // Prints for each update to any of the documents - console.log(docs[0]?.value) // 10, 10, 10 - console.log(docs[1]?.value) // null, 20, 20 - console.log(docs[2]?.value) // null, null, 30 -}) + console.log(docs[0]?.value); // 10, 10, 10 + console.log(docs[1]?.value); // null, 20, 20 + console.log(docs[2]?.value); // null, null, 30 +}); ``` Watchers can also be stopped. @@ -1104,10 +1106,10 @@ const { promise, cancel } = db.numbers.watchMany( (docs) => { // ... }, -) +); -await cancel() -await promise +await cancel(); +await promise; ``` ## Database Methods @@ -1122,7 +1124,7 @@ options argument that can be used to set the consistency mode. ```ts // Gets the total number of documents in the KV store across all collections -const count = await db.countAll() +const count = await db.countAll(); ``` ### deleteAll() @@ -1130,7 +1132,7 @@ const count = await db.countAll() Delete all documents across all collections. ```ts -await db.deleteAll() +await db.deleteAll(); ``` ### wipe() @@ -1138,7 +1140,7 @@ await db.deleteAll() Delete all kvdex entries, including undelivered and history entries. ```ts -await db.wipe() +await db.wipe(); ``` ### deleteUndelivered() @@ -1146,7 +1148,7 @@ await db.wipe() Delete an undelivered document entry by id from the database queue. ```ts -await db.deleteUndelivered("id") +await db.deleteUndelivered("id"); ``` ### findUndelivered() @@ -1156,11 +1158,11 @@ in the database queue. This method takes an optional options argument that can be used to set the consistency mode. ```ts -const doc1 = await db.findUndelivered("undelivered_id") +const doc1 = await db.findUndelivered("undelivered_id"); const doc2 = await db.findUndelivered("undelivered_id", { consistency: "eventual", // "strong" by default -}) +}); ``` ### enqueue() @@ -1172,13 +1174,13 @@ argument that can be used to set a delivery delay and topic. ```ts // Immediate delivery -await db.enqueue("some data") +await db.enqueue("some data"); // Delay of 2 seconds before delivery await db.enqueue("cake", { delay: 2_000, topic: "food", -}) +}); ``` ### listenQueue() @@ -1190,19 +1192,19 @@ can be used to set the topic. ```ts // Prints the data to console when recevied -db.listenQueue((data) => console.log(data)) +db.listenQueue((data) => console.log(data)); // Sends post request when data is received in the "posts" topic db.listenQueue(async (data) => { - const dataBody = JSON.stringify(data) + const dataBody = JSON.stringify(data); const res = await fetch("...", { method: "POST", body: data, - }) + }); - console.log("POSTED:", dataBody, res.ok) -}, { topic: "posts" }) + console.log("POSTED:", dataBody, res.ok); +}, { topic: "posts" }); ``` ### setInterval() @@ -1215,7 +1217,7 @@ before the first delivery. 
```ts // Will repeat indefinitely with 1 second interval -db.setInterval(() => console.log("Hello World!"), 1_000) +db.setInterval(() => console.log("Hello World!"), 1_000); // First callback starts after a 10 second delay, after that there is a random interval between 0 and 5 seconds db.setInterval( @@ -1228,7 +1230,7 @@ db.setInterval( // Count starts at 0 and is given before the current callback is run while: ({ count }) => count < 10, }, -) +); ``` ### loop() @@ -1241,13 +1243,13 @@ ensure the queue listener is registered before the first delivery. ```ts // Sequentially prints "Hello World!" indefinitely with no delay between each iteration -db.loop(() => console.log("Hello World!")) +db.loop(() => console.log("Hello World!")); // Sequentially prints "Hello World!" 10 times, with a 3 second delay between each iteration db.loop(() => console.log("Hello World!"), { delay: 3_000, while: ({ count }) => count < 10, -}) +}); ``` ### atomic() @@ -1256,7 +1258,7 @@ Initiate an atomic operation. The method takes a selector function as argument for selecting the initial collection context. ```ts -db.atomic((schema) => schema.users) +db.atomic((schema) => schema.users); ``` ## Atomic Operations @@ -1293,7 +1295,7 @@ const result1 = await db .atomic((schema) => schema.numbers) .delete("id_1") .set("id_2", 100) - .commit() + .commit(); // Adds 2 new entries to the numbers collection and 1 new entry to the users collection const result2 = await db @@ -1312,7 +1314,7 @@ const result2 = await db houseNumber: 42, }, }) - .commit() + .commit(); // Will fail and return Deno.KvCommitError because it is trying // to both add and delete from an indexable collection @@ -1330,16 +1332,16 @@ const result3 = await db houseNumber: 42, }, }) - .commit() + .commit(); ``` ### With checking ```ts // Only adds 10 to the value when it has not been changed since being read -let result = null +let result = null; while (!result || !result.ok) { - const { id, versionstamp, value } = await db.numbers.find("id") + const { id, versionstamp, value } = await db.numbers.find("id"); result = await db .atomic((schema) => schema.numbers) @@ -1348,7 +1350,7 @@ while (!result || !result.ok) { versionstamp, }) .set(id, value + 10) - .commit() + .commit(); } ``` @@ -1365,8 +1367,8 @@ the document data. ```ts // We assume the document exists in the KV store -const doc = await db.users.find(123n) -const flattened = doc.flat() +const doc = await db.users.find(123n); +const flattened = doc.flat(); // Document: // { @@ -1393,13 +1395,13 @@ of `kvdex`. Serialize a JSON-like value to a Uint8Array. ```ts -import { jsonSerialize } from "@olli/kvdex" +import { jsonSerialize } from "@olli/kvdex"; const serialized = jsonSerialize({ foo: "foo", bar: "bar", bigint: 10n, -}) +}); ``` ### jsonDeserialize() @@ -1407,15 +1409,15 @@ const serialized = jsonSerialize({ Deserialize a value that was serialized using `jsonSerialize()`. ```ts -import { jsonDeserialize, jsonSerialize } from "@olli/kvdex" +import { jsonDeserialize, jsonSerialize } from "@olli/kvdex"; const serialized = jsonSerialize({ foo: "foo", bar: "bar", bigint: 10n, -}) +}); -const value = jsonDeserialize(serialized) +const value = jsonDeserialize(serialized); ``` ### jsonStringify() @@ -1423,13 +1425,13 @@ const value = jsonDeserialize(serialized) Stringify a JSON-like value. 
```ts -import { jsonStringify } from "@olli/kvdex" +import { jsonStringify } from "@olli/kvdex"; const str = jsonStringify({ foo: "foo", bar: "bar", bigint: 10n, -}) +}); ``` ### jsonParse() @@ -1437,15 +1439,15 @@ const str = jsonStringify({ Parse a value that was stringified using `jsonStringify()` ```ts -import { jsonParse, jsonStringify } from "@olli/kvdex" +import { jsonParse, jsonStringify } from "@olli/kvdex"; const str = jsonStringify({ foo: "foo", bar: "bar", bigint: 10n, -}) +}); -const value = jsonParse(str) +const value = jsonParse(str); ``` ## Extensions @@ -1465,18 +1467,18 @@ KvValue, KvObject and KvArray. This makes it easier to properly build your schemas. ```ts -import { z } from "npm:zod" -import { KvIdSchema } from "jsr:@olli/kvdex/zod" +import { z } from "npm:zod"; +import { KvIdSchema } from "jsr:@olli/kvdex/zod"; const UserSchema = z.object({ username: z.string(), postIds: z.array(KvIdSchema), -}) +}); const PostSchema = z.object({ text: z.string(), userId: KvIdSchema, -}) +}); ``` ### Migrate @@ -1500,15 +1502,15 @@ Use the migrate function and pass a source KV instance and a target KV instance. Optionally pass `all: true` to migrate all entries. ```ts -import { migrate } from "jsr:@olli/kvdex/migrate" +import { migrate } from "jsr:@olli/kvdex/migrate"; -const source = await Deno.openKv("./source.sqlite3") -const target = await Deno.openKv("./target.sqlite3") +const source = await Deno.openKv("./source.sqlite3"); +const target = await Deno.openKv("./target.sqlite3"); await migrate({ source, target, -}) +}); ``` ### KV @@ -1518,23 +1520,23 @@ used to employ `kvdex` in the browser or other environments where Deno's KV store is not available, or to adapt to other database backends. ```ts -import { kvdex } from "@olli/kvdex" -import { MapKv } from "@olli/kvdex/kv" +import { kvdex } from "@olli/kvdex"; +import { MapKv } from "@olli/kvdex/kv"; // Create a database from a `MapKv` instance, using `Map` as it's backend by default. -const kv = new MapKv() // Equivalent to `new MapKv({ map: new Map() })` -const db = kvdex(kv, {}) +const kv = new MapKv(); // Equivalent to `new MapKv({ map: new Map() })` +const db = kvdex(kv, {}); ``` ```ts -import { kvdex } from "@olli/kvdex" -import { MapKv, StorageAdapter } from "@olli/kvdex/kv" +import { kvdex } from "@olli/kvdex"; +import { MapKv, StorageAdapter } from "@olli/kvdex/kv"; // Create an ephimeral database from a `MapKv` instance, // explicitly using `localStorage` as it's backend. 
-const map = new StorageAdapter(localStorage) -const kv = new MapKv({ map, clearOnClose: true }) -const db = kvdex(kv, {}) +const map = new StorageAdapter(localStorage); +const kv = new MapKv({ map, clearOnClose: true }); +const db = kvdex(kv, {}); ``` ## Blob Storage diff --git a/benchmarks/collection/add.bench.ts b/benchmarks/collection/add.bench.ts index be71912..86d30df 100644 --- a/benchmarks/collection/add.bench.ts +++ b/benchmarks/collection/add.bench.ts @@ -1,10 +1,10 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("collection - add", async (b) => { await useDb(async (db) => { - b.start() - await db.users.add(mockUser1) - b.end() - }) -}) + b.start(); + await db.users.add(mockUser1); + b.end(); + }); +}); diff --git a/benchmarks/collection/count.bench.ts b/benchmarks/collection/count.bench.ts index 1170cc0..815b8df 100644 --- a/benchmarks/collection/count.bench.ts +++ b/benchmarks/collection/count.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("collection - count [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.users.addMany(users) + const users = generateUsers(1_000); + await db.users.addMany(users); - b.start() - await db.users.count() - b.end() - }) -}) + b.start(); + await db.users.count(); + b.end(); + }); +}); diff --git a/benchmarks/collection/delete.bench.ts b/benchmarks/collection/delete.bench.ts index 657041c..6fd6ff5 100644 --- a/benchmarks/collection/delete.bench.ts +++ b/benchmarks/collection/delete.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("collection - delete [1]", async (b) => { await useDb(async (db) => { - const id = crypto.randomUUID() - await db.users.set(id, mockUser1) + const id = crypto.randomUUID(); + await db.users.set(id, mockUser1); - b.start() - await db.users.delete(id) - b.end() - }) -}) + b.start(); + await db.users.delete(id); + b.end(); + }); +}); diff --git a/benchmarks/collection/deleteMany.bench.ts b/benchmarks/collection/deleteMany.bench.ts index 9c343e6..e3489ca 100644 --- a/benchmarks/collection/deleteMany.bench.ts +++ b/benchmarks/collection/deleteMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("collection - deleteMany - [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.users.addMany(users) + const users = generateUsers(1_000); + await db.users.addMany(users); - b.start() - await db.users.deleteMany() - b.end() - }) -}) + b.start(); + await db.users.deleteMany(); + b.end(); + }); +}); diff --git a/benchmarks/collection/find.bench.ts b/benchmarks/collection/find.bench.ts index 770688b..04452a6 100644 --- a/benchmarks/collection/find.bench.ts +++ b/benchmarks/collection/find.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("collection - find", async (b) => { await useDb(async (db) => { - const 
id = "id" - await db.users.set(id, mockUser1) + const id = "id"; + await db.users.set(id, mockUser1); - b.start() - await db.users.find(id) - b.end() - }) -}) + b.start(); + await db.users.find(id); + b.end(); + }); +}); diff --git a/benchmarks/collection/findMany.bench.ts b/benchmarks/collection/findMany.bench.ts index e93b244..37d2123 100644 --- a/benchmarks/collection/findMany.bench.ts +++ b/benchmarks/collection/findMany.bench.ts @@ -1,18 +1,18 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("collection - findMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - const ids: string[] = [] + const users = generateUsers(1_000); + const ids: string[] = []; for (const user of users) { - const id = crypto.randomUUID() - await db.users.set(id, user) - ids.push(id) + const id = crypto.randomUUID(); + await db.users.set(id, user); + ids.push(id); } - b.start() - await db.users.findMany(ids) - b.end() - }) -}) + b.start(); + await db.users.findMany(ids); + b.end(); + }); +}); diff --git a/benchmarks/collection/getMany.bench.ts b/benchmarks/collection/getMany.bench.ts index 46d37dc..ab9eadd 100644 --- a/benchmarks/collection/getMany.bench.ts +++ b/benchmarks/collection/getMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("collection - getMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.users.addMany(users) + const users = generateUsers(1_000); + await db.users.addMany(users); - b.start() - await db.users.getMany() - b.end() - }) -}) + b.start(); + await db.users.getMany(); + b.end(); + }); +}); diff --git a/benchmarks/collection/update.bench.ts b/benchmarks/collection/update.bench.ts index 6f82151..c2e02d2 100644 --- a/benchmarks/collection/update.bench.ts +++ b/benchmarks/collection/update.bench.ts @@ -1,24 +1,24 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("collection - update (replace)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.u64s.set(id, new Deno.KvU64(100n)) + const id = "id"; + await db.u64s.set(id, new Deno.KvU64(100n)); - const updateData = new Deno.KvU64(200n) + const updateData = new Deno.KvU64(200n); - b.start() - await db.u64s.update(id, updateData) - b.end() - }) -}) + b.start(); + await db.u64s.update(id, updateData); + b.end(); + }); +}); Deno.bench("collection - update (shallow merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.users.set(id, mockUser1) + const id = "id"; + await db.users.set(id, mockUser1); const updateData: Partial = { address: { @@ -27,18 +27,18 @@ Deno.bench("collection - update (shallow merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.users.update(id, updateData, { strategy: "merge-shallow" }) - b.end() - }) -}) + b.start(); + await db.users.update(id, updateData, { strategy: "merge-shallow" }); + b.end(); + }); +}); Deno.bench("collection - update (deep merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.users.set(id, mockUser1) + const id = "id"; + await 
db.users.set(id, mockUser1); const updateData: Partial = { address: { @@ -47,10 +47,10 @@ Deno.bench("collection - update (deep merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.users.update(id, updateData, { strategy: "merge" }) - b.end() - }) -}) + b.start(); + await db.users.update(id, updateData, { strategy: "merge" }); + b.end(); + }); +}); diff --git a/benchmarks/db/countAll.bench.ts b/benchmarks/db/countAll.bench.ts index 152b98f..4440f83 100644 --- a/benchmarks/db/countAll.bench.ts +++ b/benchmarks/db/countAll.bench.ts @@ -1,17 +1,17 @@ -import { generateLargeUsers, generateUsers, useDb } from "../../tests/utils.ts" +import { generateLargeUsers, generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("db - deleteAll", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - const largeUsers = generateLargeUsers(1_000) + const users = generateUsers(1_000); + const largeUsers = generateLargeUsers(1_000); - await db.users.addMany(users) - await db.i_users.addMany(users) - await db.s_users.addMany(largeUsers) - await db.is_users.addMany(largeUsers) + await db.users.addMany(users); + await db.i_users.addMany(users); + await db.s_users.addMany(largeUsers); + await db.is_users.addMany(largeUsers); - b.start() - await db.countAll() - b.end() - }) -}) + b.start(); + await db.countAll(); + b.end(); + }); +}); diff --git a/benchmarks/db/deleteAll.bench.ts b/benchmarks/db/deleteAll.bench.ts index 58075bc..77fc59c 100644 --- a/benchmarks/db/deleteAll.bench.ts +++ b/benchmarks/db/deleteAll.bench.ts @@ -1,17 +1,17 @@ -import { useDb } from "../../tests/utils.ts" +import { useDb } from "../../tests/utils.ts"; Deno.bench("db - deleteAll", async (b) => { await useDb(async (db) => { - const u64s: Deno.KvU64[] = [] + const u64s: Deno.KvU64[] = []; for (let i = 0; i < 1_000; i++) { - u64s.push(new Deno.KvU64(100n)) + u64s.push(new Deno.KvU64(100n)); } - await db.u64s.addMany(u64s) + await db.u64s.addMany(u64s); - b.start() - await db.deleteAll() - b.end() - }) -}) + b.start(); + await db.deleteAll(); + b.end(); + }); +}); diff --git a/benchmarks/db/kvdex.bench.ts b/benchmarks/db/kvdex.bench.ts index 4732377..dc346c6 100644 --- a/benchmarks/db/kvdex.bench.ts +++ b/benchmarks/db/kvdex.bench.ts @@ -1,9 +1,9 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { useKv } from "../../tests/utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { useKv } from "../../tests/utils.ts"; Deno.bench("db - kvdex (10 collections)", async (b) => { await useKv((kv) => { - b.start() + b.start(); kvdex(kv, { 1: collection(model()), @@ -16,15 +16,15 @@ Deno.bench("db - kvdex (10 collections)", async (b) => { 8: collection(model()), 9: collection(model()), 10: collection(model()), - }) + }); - b.end() - }) -}) + b.end(); + }); +}); Deno.bench("db - kvdex (100 collections)", async (b) => { await useKv((kv) => { - b.start() + b.start(); kvdex(kv, { 1: collection(model()), @@ -127,8 +127,8 @@ Deno.bench("db - kvdex (100 collections)", async (b) => { 98: collection(model()), 99: collection(model()), 100: collection(model()), - }) + }); - b.end() - }) -}) + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/add.bench.ts b/benchmarks/indexable_collection/add.bench.ts index 974ff71..af7d321 100644 --- a/benchmarks/indexable_collection/add.bench.ts +++ b/benchmarks/indexable_collection/add.bench.ts @@ -1,10 +1,10 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from 
"../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - add", async (b) => { await useDb(async (db) => { - b.start() - await db.i_users.add(mockUser1) - b.end() - }) -}) + b.start(); + await db.i_users.add(mockUser1); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/count.bench.ts b/benchmarks/indexable_collection/count.bench.ts index 1058de8..dc8cf92 100644 --- a/benchmarks/indexable_collection/count.bench.ts +++ b/benchmarks/indexable_collection/count.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - count [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.i_users.addMany(users) + const users = generateUsers(1_000); + await db.i_users.addMany(users); - b.start() - await db.i_users.count() - b.end() - }) -}) + b.start(); + await db.i_users.count(); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/delete.bench.ts b/benchmarks/indexable_collection/delete.bench.ts index 94ef20e..2588a46 100644 --- a/benchmarks/indexable_collection/delete.bench.ts +++ b/benchmarks/indexable_collection/delete.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - delete [1]", async (b) => { await useDb(async (db) => { - const id = crypto.randomUUID() - await db.i_users.set(id, mockUser1) + const id = crypto.randomUUID(); + await db.i_users.set(id, mockUser1); - b.start() - await db.i_users.delete(id) - b.end() - }) -}) + b.start(); + await db.i_users.delete(id); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/deleteByPrimaryIndex.bench.ts b/benchmarks/indexable_collection/deleteByPrimaryIndex.bench.ts index 11e3c2c..b2b989a 100644 --- a/benchmarks/indexable_collection/deleteByPrimaryIndex.bench.ts +++ b/benchmarks/indexable_collection/deleteByPrimaryIndex.bench.ts @@ -1,12 +1,12 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - deleteByPrimaryIndex", async (b) => { await useDb(async (db) => { - await db.i_users.add(mockUser1) + await db.i_users.add(mockUser1); - b.start() - await db.i_users.deleteByPrimaryIndex("username", mockUser1.username) - b.end() - }) -}) + b.start(); + await db.i_users.deleteByPrimaryIndex("username", mockUser1.username); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/deleteMany.bench.ts b/benchmarks/indexable_collection/deleteMany.bench.ts index 7f8fda8..0bc2e1f 100644 --- a/benchmarks/indexable_collection/deleteMany.bench.ts +++ b/benchmarks/indexable_collection/deleteMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - deleteMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.i_users.addMany(users) + const users = generateUsers(1_000); + await db.i_users.addMany(users); - b.start() - await db.i_users.deleteMany() - b.end() - }) -}) + b.start(); + await 
db.i_users.deleteMany(); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/find.bench.ts b/benchmarks/indexable_collection/find.bench.ts index 5c84b71..b78472a 100644 --- a/benchmarks/indexable_collection/find.bench.ts +++ b/benchmarks/indexable_collection/find.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - find", async (b) => { await useDb(async (db) => { - const id = "id" - await db.i_users.set(id, mockUser1) + const id = "id"; + await db.i_users.set(id, mockUser1); - b.start() - await db.i_users.find(id) - b.end() - }) -}) + b.start(); + await db.i_users.find(id); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/findByPrimaryIndex.bench.ts b/benchmarks/indexable_collection/findByPrimaryIndex.bench.ts index 6a3ee94..0c565d0 100644 --- a/benchmarks/indexable_collection/findByPrimaryIndex.bench.ts +++ b/benchmarks/indexable_collection/findByPrimaryIndex.bench.ts @@ -1,12 +1,12 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - findByPrimaryIndex", async (b) => { await useDb(async (db) => { - await db.i_users.add(mockUser1) + await db.i_users.add(mockUser1); - b.start() - await db.i_users.findByPrimaryIndex("username", mockUser1.username) - b.end() - }) -}) + b.start(); + await db.i_users.findByPrimaryIndex("username", mockUser1.username); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/findBySecondaryIndex.bench.ts b/benchmarks/indexable_collection/findBySecondaryIndex.bench.ts index 01a398c..4832ef0 100644 --- a/benchmarks/indexable_collection/findBySecondaryIndex.bench.ts +++ b/benchmarks/indexable_collection/findBySecondaryIndex.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - findBySecondaryIndex [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.i_users.addMany(users) + const users = generateUsers(1_000); + await db.i_users.addMany(users); - b.start() - await db.i_users.findBySecondaryIndex("age", users[0].age) - b.end() - }) -}) + b.start(); + await db.i_users.findBySecondaryIndex("age", users[0].age); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/findMany.bench.ts b/benchmarks/indexable_collection/findMany.bench.ts index 0083e7c..e2856c5 100644 --- a/benchmarks/indexable_collection/findMany.bench.ts +++ b/benchmarks/indexable_collection/findMany.bench.ts @@ -1,18 +1,18 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - findMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - const ids: string[] = [] + const users = generateUsers(1_000); + const ids: string[] = []; for (const user of users) { - const id = crypto.randomUUID() - await db.i_users.set(id, user) - ids.push(id) + const id = crypto.randomUUID(); + await db.i_users.set(id, user); + ids.push(id); } - b.start() - await db.i_users.findMany(ids) - b.end() - }) -}) + b.start(); + await db.i_users.findMany(ids); + b.end(); + }); +}); diff 
--git a/benchmarks/indexable_collection/getMany.bench.ts b/benchmarks/indexable_collection/getMany.bench.ts index c89b87e..e9602ad 100644 --- a/benchmarks/indexable_collection/getMany.bench.ts +++ b/benchmarks/indexable_collection/getMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - getMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.i_users.addMany(users) + const users = generateUsers(1_000); + await db.i_users.addMany(users); - b.start() - await db.i_users.getMany() - b.end() - }) -}) + b.start(); + await db.i_users.getMany(); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/update.bench.ts b/benchmarks/indexable_collection/update.bench.ts index 5e6874b..a2b196c 100644 --- a/benchmarks/indexable_collection/update.bench.ts +++ b/benchmarks/indexable_collection/update.bench.ts @@ -1,11 +1,11 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("indexable_collection - update (shallow merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.i_users.set(id, mockUser1) + const id = "id"; + await db.i_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -14,18 +14,18 @@ Deno.bench("indexable_collection - update (shallow merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.i_users.update(id, updateData, { strategy: "merge-shallow" }) - b.end() - }) -}) + b.start(); + await db.i_users.update(id, updateData, { strategy: "merge-shallow" }); + b.end(); + }); +}); Deno.bench("collection - update (deep merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.i_users.set(id, mockUser1) + const id = "id"; + await db.i_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -34,10 +34,10 @@ Deno.bench("collection - update (deep merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.i_users.update(id, updateData, { strategy: "merge" }) - b.end() - }) -}) + b.start(); + await db.i_users.update(id, updateData, { strategy: "merge" }); + b.end(); + }); +}); diff --git a/benchmarks/indexable_collection/updateByPrimaryIndex.bench.ts b/benchmarks/indexable_collection/updateByPrimaryIndex.bench.ts index 715e2d9..45b82d0 100644 --- a/benchmarks/indexable_collection/updateByPrimaryIndex.bench.ts +++ b/benchmarks/indexable_collection/updateByPrimaryIndex.bench.ts @@ -1,12 +1,12 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench( "indexable_collection - updateByPrimaryIndex (shallow merge)", async (b) => { await useDb(async (db) => { - await db.i_users.add(mockUser1) + await db.i_users.add(mockUser1); const updateData: Partial = { address: { @@ -15,25 +15,25 @@ Deno.bench( street: "Sesame Street", houseNr: null, }, - } + }; - b.start() + b.start(); await db.i_users.updateByPrimaryIndex( "username", mockUser1.username, updateData, { 
strategy: "merge-shallow" }, - ) + ); - b.end() - }) + b.end(); + }); }, -) +); Deno.bench("collection - updateByPrimaryIndex (deep merge)", async (b) => { await useDb(async (db) => { - await db.i_users.add(mockUser1) + await db.i_users.add(mockUser1); const updateData: Partial = { address: { @@ -42,17 +42,17 @@ Deno.bench("collection - updateByPrimaryIndex (deep merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() + b.start(); await db.i_users.updateByPrimaryIndex( "username", mockUser1.username, updateData, { strategy: "merge" }, - ) + ); - b.end() - }) -}) + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/add.bench.ts b/benchmarks/serialized_collection/add.bench.ts index 04f3e96..2956a75 100644 --- a/benchmarks/serialized_collection/add.bench.ts +++ b/benchmarks/serialized_collection/add.bench.ts @@ -1,10 +1,10 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - add", async (b) => { await useDb(async (db) => { - b.start() - await db.s_users.add(mockUser1) - b.end() - }) -}) + b.start(); + await db.s_users.add(mockUser1); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/count.bench.ts b/benchmarks/serialized_collection/count.bench.ts index bbdf148..118b10a 100644 --- a/benchmarks/serialized_collection/count.bench.ts +++ b/benchmarks/serialized_collection/count.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - count [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.users.addMany(users) + const users = generateUsers(1_000); + await db.users.addMany(users); - b.start() - await db.s_users.count() - b.end() - }) -}) + b.start(); + await db.s_users.count(); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/delete.bench.ts b/benchmarks/serialized_collection/delete.bench.ts index ec298f2..851cb6a 100644 --- a/benchmarks/serialized_collection/delete.bench.ts +++ b/benchmarks/serialized_collection/delete.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - delete [1]", async (b) => { await useDb(async (db) => { - const id = crypto.randomUUID() - await db.s_users.set(id, mockUser1) + const id = crypto.randomUUID(); + await db.s_users.set(id, mockUser1); - b.start() - await db.s_users.delete(id) - b.end() - }) -}) + b.start(); + await db.s_users.delete(id); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/deleteMany.bench.ts b/benchmarks/serialized_collection/deleteMany.bench.ts index edb5b39..a3b8491 100644 --- a/benchmarks/serialized_collection/deleteMany.bench.ts +++ b/benchmarks/serialized_collection/deleteMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - deleteMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.s_users.addMany(users) + const users = generateUsers(1_000); + await db.s_users.addMany(users); - b.start() - await 
db.s_users.deleteMany() - b.end() - }) -}) + b.start(); + await db.s_users.deleteMany(); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/find.bench.ts b/benchmarks/serialized_collection/find.bench.ts index fa03066..b261dff 100644 --- a/benchmarks/serialized_collection/find.bench.ts +++ b/benchmarks/serialized_collection/find.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - find", async (b) => { await useDb(async (db) => { - const id = "id" - await db.s_users.set(id, mockUser1) + const id = "id"; + await db.s_users.set(id, mockUser1); - b.start() - await db.s_users.find(id) - b.end() - }) -}) + b.start(); + await db.s_users.find(id); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/findMany.bench.ts b/benchmarks/serialized_collection/findMany.bench.ts index c309071..a40806c 100644 --- a/benchmarks/serialized_collection/findMany.bench.ts +++ b/benchmarks/serialized_collection/findMany.bench.ts @@ -1,18 +1,18 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - findMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - const ids: string[] = [] + const users = generateUsers(1_000); + const ids: string[] = []; for (const user of users) { - const id = crypto.randomUUID() - await db.s_users.set(id, user) - ids.push(id) + const id = crypto.randomUUID(); + await db.s_users.set(id, user); + ids.push(id); } - b.start() - await db.s_users.findMany(ids) - b.end() - }) -}) + b.start(); + await db.s_users.findMany(ids); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/getMany.bench.ts b/benchmarks/serialized_collection/getMany.bench.ts index 6393ecd..9af035b 100644 --- a/benchmarks/serialized_collection/getMany.bench.ts +++ b/benchmarks/serialized_collection/getMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - getMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.s_users.addMany(users) + const users = generateUsers(1_000); + await db.s_users.addMany(users); - b.start() - await db.s_users.getMany() - b.end() - }) -}) + b.start(); + await db.s_users.getMany(); + b.end(); + }); +}); diff --git a/benchmarks/serialized_collection/update.bench.ts b/benchmarks/serialized_collection/update.bench.ts index 979be58..6dc50f7 100644 --- a/benchmarks/serialized_collection/update.bench.ts +++ b/benchmarks/serialized_collection/update.bench.ts @@ -1,24 +1,24 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_collection - update (replace)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.s_u64s.set(id, new Deno.KvU64(100n)) + const id = "id"; + await db.s_u64s.set(id, new Deno.KvU64(100n)); - const updateData = new Deno.KvU64(200n) + const updateData = new Deno.KvU64(200n); - b.start() - await db.s_u64s.update(id, updateData) - 
b.end() - }) -}) + b.start(); + await db.s_u64s.update(id, updateData); + b.end(); + }); +}); Deno.bench("serialized_collection - update (shallow merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.s_users.set(id, mockUser1) + const id = "id"; + await db.s_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -27,18 +27,18 @@ Deno.bench("serialized_collection - update (shallow merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.s_users.update(id, updateData, { strategy: "merge-shallow" }) - b.end() - }) -}) + b.start(); + await db.s_users.update(id, updateData, { strategy: "merge-shallow" }); + b.end(); + }); +}); Deno.bench("serialized_collection - update (deep merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.s_users.set(id, mockUser1) + const id = "id"; + await db.s_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -47,10 +47,10 @@ Deno.bench("serialized_collection - update (deep merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.s_users.update(id, updateData, { strategy: "merge" }) - b.end() - }) -}) + b.start(); + await db.s_users.update(id, updateData, { strategy: "merge" }); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/add.bench.ts b/benchmarks/serialized_indexable_collection/add.bench.ts index 0822d4d..42abb16 100644 --- a/benchmarks/serialized_indexable_collection/add.bench.ts +++ b/benchmarks/serialized_indexable_collection/add.bench.ts @@ -1,10 +1,10 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - add", async (b) => { await useDb(async (db) => { - b.start() - await db.is_users.add(mockUser1) - b.end() - }) -}) + b.start(); + await db.is_users.add(mockUser1); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/count.bench.ts b/benchmarks/serialized_indexable_collection/count.bench.ts index d95a350..3e0fb8f 100644 --- a/benchmarks/serialized_indexable_collection/count.bench.ts +++ b/benchmarks/serialized_indexable_collection/count.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - count [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.is_users.addMany(users) + const users = generateUsers(1_000); + await db.is_users.addMany(users); - b.start() - await db.is_users.count() - b.end() - }) -}) + b.start(); + await db.is_users.count(); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/delete.bench.ts b/benchmarks/serialized_indexable_collection/delete.bench.ts index f69bbd0..b31e7dc 100644 --- a/benchmarks/serialized_indexable_collection/delete.bench.ts +++ b/benchmarks/serialized_indexable_collection/delete.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - delete [1]", async (b) => { await useDb(async (db) => { - const id = crypto.randomUUID() - await db.is_users.set(id, mockUser1) + const id = crypto.randomUUID(); 
+ await db.is_users.set(id, mockUser1); - b.start() - await db.is_users.delete(id) - b.end() - }) -}) + b.start(); + await db.is_users.delete(id); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/deleteByPrimaryIndex.bench.ts b/benchmarks/serialized_indexable_collection/deleteByPrimaryIndex.bench.ts index 9ed6e65..ed83597 100644 --- a/benchmarks/serialized_indexable_collection/deleteByPrimaryIndex.bench.ts +++ b/benchmarks/serialized_indexable_collection/deleteByPrimaryIndex.bench.ts @@ -1,15 +1,15 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - deleteByPrimaryIndex", async (b) => { await useDb(async (db) => { - await db.is_users.add(mockUser1) + await db.is_users.add(mockUser1); - b.start() - await db.is_users.deleteByPrimaryIndex("username", mockUser1.username) - b.end() - }) + b.start(); + await db.is_users.deleteByPrimaryIndex("username", mockUser1.username); + b.end(); + }); }, -) +); diff --git a/benchmarks/serialized_indexable_collection/deleteMany.bench.ts b/benchmarks/serialized_indexable_collection/deleteMany.bench.ts index 95c3059..1ad4091 100644 --- a/benchmarks/serialized_indexable_collection/deleteMany.bench.ts +++ b/benchmarks/serialized_indexable_collection/deleteMany.bench.ts @@ -1,15 +1,15 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - deleteMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.is_users.addMany(users) + const users = generateUsers(1_000); + await db.is_users.addMany(users); - b.start() - await db.is_users.deleteMany() - b.end() - }) + b.start(); + await db.is_users.deleteMany(); + b.end(); + }); }, -) +); diff --git a/benchmarks/serialized_indexable_collection/find.bench.ts b/benchmarks/serialized_indexable_collection/find.bench.ts index e8f899c..ed27730 100644 --- a/benchmarks/serialized_indexable_collection/find.bench.ts +++ b/benchmarks/serialized_indexable_collection/find.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - find", async (b) => { await useDb(async (db) => { - const id = "id" - await db.is_users.set(id, mockUser1) + const id = "id"; + await db.is_users.set(id, mockUser1); - b.start() - await db.is_users.find(id) - b.end() - }) -}) + b.start(); + await db.is_users.find(id); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/findByPrimaryIndex.bench.ts b/benchmarks/serialized_indexable_collection/findByPrimaryIndex.bench.ts index 9da33f9..ca6ffa6 100644 --- a/benchmarks/serialized_indexable_collection/findByPrimaryIndex.bench.ts +++ b/benchmarks/serialized_indexable_collection/findByPrimaryIndex.bench.ts @@ -1,15 +1,15 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - findByPrimaryIndex", async (b) => { await useDb(async (db) => { - await db.is_users.add(mockUser1) + await db.is_users.add(mockUser1); - b.start() - 
await db.is_users.findByPrimaryIndex("username", mockUser1.username) - b.end() - }) + b.start(); + await db.is_users.findByPrimaryIndex("username", mockUser1.username); + b.end(); + }); }, -) +); diff --git a/benchmarks/serialized_indexable_collection/findBySecondaryIndex.bench.ts b/benchmarks/serialized_indexable_collection/findBySecondaryIndex.bench.ts index 65e20ef..b719c63 100644 --- a/benchmarks/serialized_indexable_collection/findBySecondaryIndex.bench.ts +++ b/benchmarks/serialized_indexable_collection/findBySecondaryIndex.bench.ts @@ -1,15 +1,15 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - findBySecondaryIndex [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.is_users.addMany(users) + const users = generateUsers(1_000); + await db.is_users.addMany(users); - b.start() - await db.is_users.findBySecondaryIndex("age", users[0].age) - b.end() - }) + b.start(); + await db.is_users.findBySecondaryIndex("age", users[0].age); + b.end(); + }); }, -) +); diff --git a/benchmarks/serialized_indexable_collection/findMany.bench.ts b/benchmarks/serialized_indexable_collection/findMany.bench.ts index 4ca4f10..ccc646c 100644 --- a/benchmarks/serialized_indexable_collection/findMany.bench.ts +++ b/benchmarks/serialized_indexable_collection/findMany.bench.ts @@ -1,18 +1,18 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - findMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - const ids: string[] = [] + const users = generateUsers(1_000); + const ids: string[] = []; for (const user of users) { - const id = crypto.randomUUID() - await db.is_users.set(id, user) - ids.push(id) + const id = crypto.randomUUID(); + await db.is_users.set(id, user); + ids.push(id); } - b.start() - await db.is_users.findMany(ids) - b.end() - }) -}) + b.start(); + await db.is_users.findMany(ids); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/getMany.bench.ts b/benchmarks/serialized_indexable_collection/getMany.bench.ts index fe9fea5..8d424c6 100644 --- a/benchmarks/serialized_indexable_collection/getMany.bench.ts +++ b/benchmarks/serialized_indexable_collection/getMany.bench.ts @@ -1,12 +1,12 @@ -import { generateUsers, useDb } from "../../tests/utils.ts" +import { generateUsers, useDb } from "../../tests/utils.ts"; Deno.bench("serialized_indexable_collection - getMany [1_000]", async (b) => { await useDb(async (db) => { - const users = generateUsers(1_000) - await db.is_users.addMany(users) + const users = generateUsers(1_000); + await db.is_users.addMany(users); - b.start() - await db.is_users.getMany() - b.end() - }) -}) + b.start(); + await db.is_users.getMany(); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/update.bench.ts b/benchmarks/serialized_indexable_collection/update.bench.ts index ecb5ca8..3543c1a 100644 --- a/benchmarks/serialized_indexable_collection/update.bench.ts +++ b/benchmarks/serialized_indexable_collection/update.bench.ts @@ -1,13 +1,13 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } 
from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - update (shallow merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.is_users.set(id, mockUser1) + const id = "id"; + await db.is_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -16,19 +16,19 @@ Deno.bench( street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.is_users.update(id, updateData, { strategy: "merge-shallow" }) - b.end() - }) + b.start(); + await db.is_users.update(id, updateData, { strategy: "merge-shallow" }); + b.end(); + }); }, -) +); Deno.bench("serialized_collection - update (deep merge)", async (b) => { await useDb(async (db) => { - const id = "id" - await db.is_users.set(id, mockUser1) + const id = "id"; + await db.is_users.set(id, mockUser1); const updateData: Partial = { address: { @@ -37,10 +37,10 @@ Deno.bench("serialized_collection - update (deep merge)", async (b) => { street: "Sesame Street", houseNr: null, }, - } + }; - b.start() - await db.is_users.update(id, updateData, { strategy: "merge" }) - b.end() - }) -}) + b.start(); + await db.is_users.update(id, updateData, { strategy: "merge" }); + b.end(); + }); +}); diff --git a/benchmarks/serialized_indexable_collection/updateByPrimaryIndex.bench.ts b/benchmarks/serialized_indexable_collection/updateByPrimaryIndex.bench.ts index dff6500..e59c863 100644 --- a/benchmarks/serialized_indexable_collection/updateByPrimaryIndex.bench.ts +++ b/benchmarks/serialized_indexable_collection/updateByPrimaryIndex.bench.ts @@ -1,12 +1,12 @@ -import { mockUser1 } from "../../tests/mocks.ts" -import type { User } from "../../tests/models.ts" -import { useDb } from "../../tests/utils.ts" +import { mockUser1 } from "../../tests/mocks.ts"; +import type { User } from "../../tests/models.ts"; +import { useDb } from "../../tests/utils.ts"; Deno.bench( "serialized_indexable_collection - updateByPrimaryIndex (shallow merge)", async (b) => { await useDb(async (db) => { - await db.is_users.add(mockUser1) + await db.is_users.add(mockUser1); const updateData: Partial = { address: { @@ -15,27 +15,27 @@ Deno.bench( street: "Sesame Street", houseNr: null, }, - } + }; - b.start() + b.start(); await db.is_users.updateByPrimaryIndex( "username", mockUser1.username, updateData, { strategy: "merge-shallow" }, - ) + ); - b.end() - }) + b.end(); + }); }, -) +); Deno.bench( "serialized_collection - updateByPrimaryIndex (deep merge)", async (b) => { await useDb(async (db) => { - await db.is_users.add(mockUser1) + await db.is_users.add(mockUser1); const updateData: Partial = { address: { @@ -44,18 +44,18 @@ Deno.bench( street: "Sesame Street", houseNr: null, }, - } + }; - b.start() + b.start(); await db.is_users.updateByPrimaryIndex( "username", mockUser1.username, updateData, { strategy: "merge" }, - ) + ); - b.end() - }) + b.end(); + }); }, -) +); diff --git a/benchmarks/utils/_object.ts b/benchmarks/utils/_object.ts index 455f745..e02463a 100644 --- a/benchmarks/utils/_object.ts +++ b/benchmarks/utils/_object.ts @@ -1,26 +1,26 @@ -export const obj = createLargeObject() +export const obj = createLargeObject(); function createLargeObject(depth = 6) { if (depth <= 0) { - return null + return null; } - const arr: any[] = [] - const obj: Record = {} + const arr: any[] = []; + const obj: Record = {}; for (let i = 0; i < 25_000; i++) { - arr.push(crypto.randomUUID()) + arr.push(crypto.randomUUID()); if (i % 1_000 === 0) { - obj[crypto.randomUUID()] = 100n + obj[crypto.randomUUID()] = 100n; } if (i % 12_500 === 0) { - 
obj[crypto.randomUUID()] = createLargeObject(depth - 1) + obj[crypto.randomUUID()] = createLargeObject(depth - 1); } } - obj[crypto.randomUUID()] = arr + obj[crypto.randomUUID()] = arr; - return obj + return obj; } diff --git a/benchmarks/utils/deserialize.bench.ts b/benchmarks/utils/deserialize.bench.ts index 35d197b..6795fe1 100644 --- a/benchmarks/utils/deserialize.bench.ts +++ b/benchmarks/utils/deserialize.bench.ts @@ -1,24 +1,24 @@ -import { obj } from "./_object.ts" +import { obj } from "./_object.ts"; import { jsonDeserialize, jsonSerialize, v8Deserialize, v8Serialize, -} from "../../src/utils.ts" +} from "../../src/utils.ts"; -const js = jsonSerialize(obj) -const ds = v8Serialize(obj) +const js = jsonSerialize(obj); +const ds = v8Serialize(obj); Deno.bench( `utils - jsonDeserialize (${js.byteLength / 1024 / 1024} MB)`, () => { - jsonDeserialize(js) + jsonDeserialize(js); }, -) +); Deno.bench( `utils - v8Deserialize - (${ds.byteLength / 1024 / 1024} MS)`, () => { - v8Deserialize(ds) + v8Deserialize(ds); }, -) +); diff --git a/benchmarks/utils/serialize.bench.ts b/benchmarks/utils/serialize.bench.ts index 8c509ef..dba55ed 100644 --- a/benchmarks/utils/serialize.bench.ts +++ b/benchmarks/utils/serialize.bench.ts @@ -1,10 +1,10 @@ -import { obj } from "./_object.ts" -import { jsonSerialize, v8Serialize } from "../../src/utils.ts" +import { obj } from "./_object.ts"; +import { jsonSerialize, v8Serialize } from "../../src/utils.ts"; Deno.bench("utils - jsonSerialize", () => { - jsonSerialize(obj) -}) + jsonSerialize(obj); +}); Deno.bench("utils - v8Serialize", () => { - v8Serialize(obj) -}) + v8Serialize(obj); +}); diff --git a/deno.json b/deno.json index 22c43ff..079dedb 100644 --- a/deno.json +++ b/deno.json @@ -8,15 +8,12 @@ "./kv": "./src/ext/kv/mod.ts" }, "tasks": { - "check": "deno check mod.ts src/*.ts tests/**/*.ts benchmarks/**/*.ts", + "check": "deno check **/*.ts", "test": "deno test --allow-write --allow-read --allow-ffi --allow-sys --unstable-kv --trace-leaks", "bench": "deno bench --unstable-kv", "prep": "deno task check && deno fmt && deno lint && deno publish --dry-run --allow-dirty && deno task test", "cache": "deno cache -r mod.ts && deno cache -r ext/zod.ts && deno cache -r ext/migrate.ts" }, - "fmt": { - "semiColons": false - }, "lint": { "rules": { "exclude": ["no-explicit-any"], diff --git a/mod.ts b/mod.ts index e78c448..ccbcfaa 100644 --- a/mod.ts +++ b/mod.ts @@ -1,9 +1,9 @@ // Expose constructor functions and classes -export { model } from "./src/model.ts" -export { Kvdex, kvdex } from "./src/kvdex.ts" -export { Collection, collection } from "./src/collection.ts" -export { AtomicBuilder } from "./src/atomic_builder.ts" -export { Document } from "./src/document.ts" +export { model } from "./src/model.ts"; +export { Kvdex, kvdex } from "./src/kvdex.ts"; +export { Collection, collection } from "./src/collection.ts"; +export { AtomicBuilder } from "./src/atomic_builder.ts"; +export { Document } from "./src/document.ts"; // Expose reusable utilities export { @@ -11,10 +11,10 @@ export { jsonParse, jsonSerialize, jsonStringify, -} from "./src/utils.ts" +} from "./src/utils.ts"; // Expose errors -export * from "./src/errors.ts" +export * from "./src/errors.ts"; // Expose types -export type * from "./src/types.ts" +export type * from "./src/types.ts"; diff --git a/src/atomic_builder.ts b/src/atomic_builder.ts index 1f9eb27..28e3585 100644 --- a/src/atomic_builder.ts +++ b/src/atomic_builder.ts @@ -1,6 +1,6 @@ -import type { Collection } from "./collection.ts" -import { 
ulid } from "./deps.ts" -import { InvalidCollectionError } from "./errors.ts" +import type { Collection } from "./collection.ts"; +import { ulid } from "./deps.ts"; +import { InvalidCollectionError } from "./errors.ts"; import type { AtomicCheck, AtomicMutation, @@ -20,7 +20,7 @@ import type { ParseId, Schema, SchemaDefinition, -} from "./types.ts" +} from "./types.ts"; import { allFulfilled, deleteIndices, @@ -28,7 +28,7 @@ import { keyEq, prepareEnqueue, setIndices, -} from "./utils.ts" +} from "./utils.ts"; /** * Builder object for creating and executing atomic operations in the KV store. @@ -42,14 +42,14 @@ export class AtomicBuilder< const TOutput extends KvValue, const TOptions extends CollectionOptions, > { - private kv: DenoKv - private schema: TSchema - private operations: Operations + private kv: DenoKv; + private schema: TSchema; + private operations: Operations; private collection: Collection< TInput, TOutput, CollectionOptions - > + >; /** * Create a new AtomicBuilder for building and executing atomic operations in the KV store. @@ -73,13 +73,13 @@ export class AtomicBuilder< if (collection._isSerialized) { throw new InvalidCollectionError( "Atomic operations are not supported for serialized collections", - ) + ); } // Set kv, schema and collection context - this.kv = kv - this.schema = schema - this.collection = collection + this.kv = kv; + this.schema = schema; + this.collection = collection; // Initiate operations or set from given operations this.operations = operations ?? { @@ -88,7 +88,7 @@ export class AtomicBuilder< prepareDeleteFns: [], indexDeleteCollectionKeys: [], indexAddCollectionKeys: [], - } + }; } /** @@ -117,7 +117,7 @@ export class AtomicBuilder< this.schema, selector(this.schema), this.operations, - ) + ); } /** @@ -141,7 +141,7 @@ export class AtomicBuilder< value: TInput, options?: AtomicSetOptions, ): this { - return this.setDocument(null, value, options) + return this.setDocument(null, value, options); } /** @@ -167,7 +167,7 @@ export class AtomicBuilder< value: TInput, options?: AtomicSetOptions, ): this { - return this.setDocument(id, value, options) + return this.setDocument(id, value, options); } /** @@ -185,42 +185,42 @@ export class AtomicBuilder< */ delete(id: ParseId): this { // Create id key from id and collection id key - const collection = this.collection - const idKey = extendKey(collection._keys.id, id) + const collection = this.collection; + const idKey = extendKey(collection._keys.id, id); // Add delete operation - this.operations.atomic.delete(idKey) + this.operations.atomic.delete(idKey); // If collection is indexable, handle indexing if (this.collection._isIndexable) { // Add collection key for collision detection - this.operations.indexDeleteCollectionKeys.push(collection._keys.base) + this.operations.indexDeleteCollectionKeys.push(collection._keys.base); // Add delete preperation function to prepeare delete functions list this.operations.prepareDeleteFns.push(async (kv) => { - const doc = await kv.get(idKey) + const doc = await kv.get(idKey); return { id, data: doc.value as KvObject ?? 
{}, - } - }) + }; + }); } // Set history entry if keeps history if (this.collection._keepsHistory) { - const historyKey = extendKey(this.collection._keys.history, id, ulid()) + const historyKey = extendKey(this.collection._keys.history, id, ulid()); const historyEntry: HistoryEntry = { type: "delete", timestamp: new Date(), - } + }; - this.operations.atomic.set(historyKey, historyEntry) + this.operations.atomic.set(historyKey, historyEntry); } // Return current AtomicBuilder - return this + return this; } /** @@ -243,19 +243,19 @@ export class AtomicBuilder< // Create Denoatomic checks from atomci checks input list const checks: DenoAtomicCheck[] = atomicChecks.map( ({ id, versionstamp }) => { - const key = extendKey(this.collection._keys.id, id) + const key = extendKey(this.collection._keys.id, id); return { key, versionstamp, - } + }; }, - ) + ); // Add chech operation - this.operations.atomic.check(...checks) + this.operations.atomic.check(...checks); // Return current AtomicBuilder - return this + return this; } /** @@ -277,9 +277,9 @@ export class AtomicBuilder< id: ParseId, value: TOutput extends DenoKvU64 ? bigint : never, ): this { - const idKey = extendKey(this.collection._keys.id, id) - this.operations.atomic.sum(idKey, value) - return this + const idKey = extendKey(this.collection._keys.id, id); + this.operations.atomic.sum(idKey, value); + return this; } /** @@ -302,9 +302,9 @@ export class AtomicBuilder< id: ParseId, value: TOutput extends DenoKvU64 ? bigint : never, ): this { - const idKey = extendKey(this.collection._keys.id, id) - this.operations.atomic.min(idKey, value) - return this + const idKey = extendKey(this.collection._keys.id, id); + this.operations.atomic.min(idKey, value); + return this; } /** @@ -327,9 +327,9 @@ export class AtomicBuilder< id: ParseId, value: TOutput extends DenoKvU64 ? 
bigint : never, ): this { - const idKey = extendKey(this.collection._keys.id, id) - this.operations.atomic.max(idKey, value) - return this + const idKey = extendKey(this.collection._keys.id, id); + this.operations.atomic.max(idKey, value); + return this; } /** @@ -362,28 +362,28 @@ export class AtomicBuilder< mutations.forEach(({ id, ...rest }) => { switch (rest.type) { case "delete": - this.delete(id) - break + this.delete(id); + break; case "set": - this.set(id, rest.value, { expireIn: rest.expireIn }) - break + this.set(id, rest.value, { expireIn: rest.expireIn }); + break; case "add": - this.add(rest.value, { expireIn: rest.expireIn }) - break + this.add(rest.value, { expireIn: rest.expireIn }); + break; case "max": - this.max(id, rest.value as any) - break + this.max(id, rest.value as any); + break; case "min": - this.min(id, rest.value as any) - break + this.min(id, rest.value as any); + break; case "sum": - this.sum(id, rest.value as any) - break + this.sum(id, rest.value as any); + break; } - }) + }); // Return current AtomicBuilder - return this + return this; } /** @@ -421,12 +421,12 @@ export class AtomicBuilder< this.collection._keys.undelivered, data, options, - ) + ); - this.operations.atomic.enqueue(prep.msg, prep.options) + this.operations.atomic.enqueue(prep.msg, prep.options); // Return current AtomicBuilder - return this + return this; } /** @@ -438,7 +438,7 @@ export class AtomicBuilder< async commit(): Promise { // Perform async mutations for (const mut of this.operations.asyncMutations) { - await mut() + await mut(); } // Check for key collisions between add/delete @@ -452,16 +452,16 @@ export class AtomicBuilder< // If collisions are detected, return commit error return { ok: false, - } + }; } // Prepare delete ops const preparedIndexDeletes = await allFulfilled( this.operations.prepareDeleteFns.map((fn) => fn(this.kv)), - ) + ); // Execute atomic operation - const commitResult = await this.operations.atomic.commit() + const commitResult = await this.operations.atomic.commit(); // If successful commit, perform delete ops if (commitResult.ok) { @@ -471,10 +471,10 @@ export class AtomicBuilder< const { id, data, - } = preparedDelete + } = preparedDelete; // Initiate atomic operation for index deletions - const atomic = this.kv.atomic() + const atomic = this.kv.atomic(); // Set index delete operations using atomic operation await deleteIndices( @@ -482,16 +482,16 @@ export class AtomicBuilder< data, atomic, this.collection, - ) + ); // Execute atomic operation - await atomic.commit() + await atomic.commit(); }), - ) + ); } // Return commit result - return commitResult + return commitResult; } /***********************/ @@ -515,22 +515,22 @@ export class AtomicBuilder< ) { this.operations.asyncMutations.push(async () => { // Create id key from collection id key and id - const collection = this.collection + const collection = this.collection; const parsed = collection._model._transform?.(value as TInput) ?? - collection._model.parse(value) + collection._model.parse(value); - const docId = id ?? await collection._idGenerator(parsed) - const idKey = extendKey(collection._keys.id, docId) + const docId = id ?? 
await collection._idGenerator(parsed); + const idKey = extendKey(collection._keys.id, docId); // Add set operation this.operations.atomic .check({ key: idKey, versionstamp: null }) - .set(idKey, parsed, options) + .set(idKey, parsed, options); if (collection._isIndexable) { // Add collection id key for collision detection - this.operations.indexAddCollectionKeys.push(collection._keys.base) + this.operations.indexAddCollectionKeys.push(collection._keys.base); // Add indexing operations await setIndices( @@ -540,7 +540,7 @@ export class AtomicBuilder< this.operations.atomic, this.collection, options, - ) + ); } // Set history entry if keeps history @@ -549,19 +549,19 @@ export class AtomicBuilder< this.collection._keys.history, docId, ulid(), - ) + ); const historyEntry: HistoryEntry = { type: "write", timestamp: new Date(), value: parsed, - } + }; - this.operations.atomic.set(historyKey, historyEntry) + this.operations.atomic.set(historyKey, historyEntry); } - }) + }); // Return current AtomicBuilder - return this + return this; } } diff --git a/src/atomic_pool.ts b/src/atomic_pool.ts index 7979429..88832df 100644 --- a/src/atomic_pool.ts +++ b/src/atomic_pool.ts @@ -5,62 +5,62 @@ import type { DenoKvCommitError, DenoKvCommitResult, DenoKvStrictKey, -} from "./types.ts" +} from "./types.ts"; /** Holds atomic mutations in a pool until bound to an actual atomic operation */ export class AtomicPool implements DenoAtomicOperation { - private pool: Array<(op: DenoAtomicOperation) => DenoAtomicOperation> + private pool: Array<(op: DenoAtomicOperation) => DenoAtomicOperation>; constructor() { - this.pool = [] + this.pool = []; } set(key: DenoKvStrictKey, value: unknown, options?: AtomicSetOptions) { - this.pool.push((op) => op.set(key, value, options)) - return this + this.pool.push((op) => op.set(key, value, options)); + return this; } delete(key: DenoKvStrictKey) { - this.pool.push((op) => op.delete(key)) - return this + this.pool.push((op) => op.delete(key)); + return this; } check(...checks: DenoAtomicCheck[]) { - this.pool.push((op) => op.check(...checks)) - return this + this.pool.push((op) => op.check(...checks)); + return this; } sum(key: DenoKvStrictKey, n: bigint) { - this.pool.push((op) => op.sum(key, n)) - return this + this.pool.push((op) => op.sum(key, n)); + return this; } max(key: DenoKvStrictKey, n: bigint) { - this.pool.push((op) => op.max(key, n)) - return this + this.pool.push((op) => op.max(key, n)); + return this; } min(key: DenoKvStrictKey, n: bigint): this { - this.pool.push((op) => op.min(key, n)) - return this + this.pool.push((op) => op.min(key, n)); + return this; } enqueue( value: unknown, options?: { - delay?: number | undefined - keysIfUndelivered?: DenoKvStrictKey[] + delay?: number | undefined; + keysIfUndelivered?: DenoKvStrictKey[]; }, ) { - this.pool.push((op) => op.enqueue(value, options)) - return this + this.pool.push((op) => op.enqueue(value, options)); + return this; } commit(): Promise { - throw Error("Not Implemented") + throw Error("Not Implemented"); } bindTo(atomic: DenoAtomicOperation) { - this.pool.forEach((mutation) => mutation(atomic)) + this.pool.forEach((mutation) => mutation(atomic)); } } diff --git a/src/atomic_wrapper.ts b/src/atomic_wrapper.ts index ff8136e..7d1fc82 100644 --- a/src/atomic_wrapper.ts +++ b/src/atomic_wrapper.ts @@ -3,7 +3,7 @@ import { ATOMIC_OPERATION_MUTATION_LIMIT, ATOMIC_OPERATION_SIZE_LIMIT, ATOMIC_OPERTION_CHECK_LIMIT, -} from "./constants.ts" +} from "./constants.ts"; import type { AtomicSetOptions, DenoAtomicCheck, @@ 
-12,68 +12,68 @@ import type { DenoKvCommitError, DenoKvCommitResult, DenoKvStrictKey, -} from "./types.ts" +} from "./types.ts"; /** * Implements the AtomicOperation interface and automatically executes * batched operations using a dynamic attributes such as count and size. */ export class AtomicWrapper implements DenoAtomicOperation { - private kv: DenoKv - private atomics: DenoAtomicOperation[] - private currentAtomic: DenoAtomicOperation - private currentCount: number - private currentCheckCount: number - private currentSize: number - private currentKeySize: number + private kv: DenoKv; + private atomics: DenoAtomicOperation[]; + private currentAtomic: DenoAtomicOperation; + private currentCount: number; + private currentCheckCount: number; + private currentSize: number; + private currentKeySize: number; constructor(kv: DenoKv) { - this.kv = kv - this.atomics = [] - this.currentAtomic = kv.atomic() - this.currentCount = 0 - this.currentCheckCount = 0 - this.currentSize = 0 - this.currentKeySize = 0 + this.kv = kv; + this.atomics = []; + this.currentAtomic = kv.atomic(); + this.currentCount = 0; + this.currentCheckCount = 0; + this.currentSize = 0; + this.currentKeySize = 0; } set(key: DenoKvStrictKey, value: unknown, options?: AtomicSetOptions) { - this.addMutation((op) => op.set(key, value, options), 67, 2, false) - return this + this.addMutation((op) => op.set(key, value, options), 67, 2, false); + return this; } delete(key: DenoKvStrictKey) { - this.addMutation((op) => op.delete(key), 3, 2, false) - return this + this.addMutation((op) => op.delete(key), 3, 2, false); + return this; } check(...checks: DenoAtomicCheck[]) { checks.forEach((check) => this.addMutation((op) => op.check(check), 3, 2, true) - ) - return this + ); + return this; } sum(key: DenoKvStrictKey, n: bigint) { - this.addMutation((op) => op.sum(key, n), 3, 2, false) - return this + this.addMutation((op) => op.sum(key, n), 3, 2, false); + return this; } max(key: DenoKvStrictKey, n: bigint) { - this.addMutation((op) => op.max(key, n), 3, 2, false) - return this + this.addMutation((op) => op.max(key, n), 3, 2, false); + return this; } min(key: DenoKvStrictKey, n: bigint): this { - this.addMutation((op) => op.min(key, n), 3, 2, false) - return this + this.addMutation((op) => op.min(key, n), 3, 2, false); + return this; } enqueue( value: unknown, options?: { - delay?: number | undefined - keysIfUndelivered?: DenoKvStrictKey[] | undefined + delay?: number | undefined; + keysIfUndelivered?: DenoKvStrictKey[] | undefined; } | undefined, ) { this.addMutation( @@ -81,37 +81,39 @@ export class AtomicWrapper implements DenoAtomicOperation { 96, 2 + ((options?.keysIfUndelivered?.length ?? 0) * 2), false, - ) + ); - return this + return this; } async commit(): Promise { // Add curent operation to atomics list if (this.currentCount > 0) { - this.atomics.push(this.currentAtomic) + this.atomics.push(this.currentAtomic); } // Commit all operations const settled = await Promise.allSettled( this.atomics.map((op) => op.commit()), - ) + ); // Check status of all commits - const success = settled.every((v) => v.status === "fulfilled" && v.value.ok) + const success = settled.every((v) => + v.status === "fulfilled" && v.value.ok + ); // If successful, return commit result if (success) { return { ok: true, versionstamp: (settled.at(0) as any)?.value.versionstamp ?? 
"0", - } + }; } // Return commit error return { ok: false, - } + }; } /** PRIVATE METHODS */ @@ -127,12 +129,12 @@ export class AtomicWrapper implements DenoAtomicOperation { keySize: number, isCheck: boolean, ) { - this.currentSize += size - this.currentKeySize += keySize - this.currentCount++ + this.currentSize += size; + this.currentKeySize += keySize; + this.currentCount++; if (isCheck) { - this.currentCheckCount++ + this.currentCheckCount++; } if ( @@ -141,14 +143,14 @@ export class AtomicWrapper implements DenoAtomicOperation { this.currentKeySize > ATOMIC_OPERATION_KEY_SIZE_LIMIT || this.currentCheckCount > ATOMIC_OPERTION_CHECK_LIMIT ) { - this.atomics.push(this.currentAtomic) - this.currentAtomic = this.kv.atomic() - this.currentCount = 0 - this.currentCheckCount = 0 - this.currentSize = 0 - this.currentKeySize = 0 + this.atomics.push(this.currentAtomic); + this.currentAtomic = this.kv.atomic(); + this.currentCount = 0; + this.currentCheckCount = 0; + this.currentSize = 0; + this.currentKeySize = 0; } - mutation(this.currentAtomic) + mutation(this.currentAtomic); } } diff --git a/src/collection.ts b/src/collection.ts index 319512a..00f68ca 100644 --- a/src/collection.ts +++ b/src/collection.ts @@ -44,7 +44,7 @@ import type { UpdateOptions, UpdateStrategy, WatchOptions, -} from "./types.ts" +} from "./types.ts"; import { allFulfilled, checkIndices, @@ -66,7 +66,7 @@ import { prepareEnqueue, selectsAll, setIndices, -} from "./utils.ts" +} from "./utils.ts"; import { DEFAULT_UPDATE_STRATEGY, HISTORY_KEY_PREFIX, @@ -77,14 +77,14 @@ import { SEGMENT_KEY_PREFIX, UINT8ARRAY_LENGTH_LIMIT, UNDELIVERED_KEY_PREFIX, -} from "./constants.ts" -import { AtomicWrapper } from "./atomic_wrapper.ts" -import { AtomicPool } from "./atomic_pool.ts" -import { Document } from "./document.ts" -import { model as m } from "./model.ts" -import { concat, deepMerge, ulid } from "./deps.ts" -import { v8Serialize } from "./utils.ts" -import { v8Deserialize } from "./utils.ts" +} from "./constants.ts"; +import { AtomicWrapper } from "./atomic_wrapper.ts"; +import { AtomicPool } from "./atomic_pool.ts"; +import { Document } from "./document.ts"; +import { model as m } from "./model.ts"; +import { concat, deepMerge, ulid } from "./deps.ts"; +import { v8Serialize } from "./utils.ts"; +import { v8Deserialize } from "./utils.ts"; /** * Create a new collection within a database context. @@ -136,7 +136,7 @@ export function collection< idempotentListener, model, options, - ) + ); } /** Represents a collection of documents and provides methods for handling them, alongside queues. 
*/ @@ -145,19 +145,19 @@ export class Collection< const TOutput extends KvValue, const TOptions extends CollectionOptions, > { - private kv: DenoKv - private queueHandlers: QueueHandlers - private idempotentListener: IdempotentListener - - readonly _model: Model - readonly _primaryIndexList: string[] - readonly _secondaryIndexList: string[] - readonly _keys: CollectionKeys - readonly _idGenerator: IdGenerator> - readonly _serializer: Serializer - readonly _isIndexable: boolean - readonly _isSerialized: boolean - readonly _keepsHistory: boolean + private kv: DenoKv; + private queueHandlers: QueueHandlers; + private idempotentListener: IdempotentListener; + + readonly _model: Model; + readonly _primaryIndexList: string[]; + readonly _secondaryIndexList: string[]; + readonly _keys: CollectionKeys; + readonly _idGenerator: IdGenerator>; + readonly _serializer: Serializer; + readonly _isIndexable: boolean; + readonly _isSerialized: boolean; + readonly _keepsHistory: boolean; constructor( kv: DenoKv, @@ -168,11 +168,11 @@ export class Collection< options?: TOptions, ) { // Set basic fields - this.kv = kv - this.queueHandlers = queueHandlers - this.idempotentListener = idempotentListener - this._model = model - this._idGenerator = options?.idGenerator ?? generateId as any + this.kv = kv; + this.queueHandlers = queueHandlers; + this.idempotentListener = idempotentListener; + this._model = model; + this._idGenerator = options?.idGenerator ?? generateId as any; // Set keys this._keys = { @@ -213,25 +213,25 @@ export class Collection< SEGMENT_KEY_PREFIX, ...key, ), - } + }; // Check all possible options - const opts = (options ?? {}) as PossibleCollectionOptions + const opts = (options ?? {}) as PossibleCollectionOptions; // Set index lists - this._primaryIndexList = [] - this._secondaryIndexList = [] + this._primaryIndexList = []; + this._secondaryIndexList = []; Object.entries(opts?.indices ?? {}).forEach(([index, value]) => { if (value === "primary") { - this._primaryIndexList.push(index) + this._primaryIndexList.push(index); } else { - this._secondaryIndexList.push(index) + this._secondaryIndexList.push(index); } - }) + }); // Set serialization - this._isSerialized = !!opts?.serialize + this._isSerialized = !!opts?.serialize; if (opts?.serialize === "v8") { this._serializer = { @@ -239,28 +239,28 @@ export class Collection< deserialize: v8Deserialize, compress, decompress, - } + }; } else if (opts?.serialize === "v8-uncompressed") { this._serializer = { serialize: v8Serialize, deserialize: v8Deserialize, compress: (v) => v, decompress: (v) => v, - } + }; } else if (opts?.serialize === "json") { this._serializer = { serialize: jsonSerialize, deserialize: jsonDeserialize, compress, decompress, - } + }; } else if (opts?.serialize === "json-uncompressed") { this._serializer = { serialize: jsonSerialize, deserialize: jsonDeserialize, compress: (v) => v, decompress: (v) => v, - } + }; } else { this._serializer = { serialize: jsonSerialize, @@ -268,15 +268,15 @@ export class Collection< compress, decompress, ...opts?.serialize, - } + }; } // Set isIndexable flag this._isIndexable = this._primaryIndexList.length > 0 || - this._secondaryIndexList.length > 0 + this._secondaryIndexList.length > 0; // Set keepsHistory flag - this._keepsHistory = options?.history ?? false + this._keepsHistory = options?.history ?? 
false; } /**********************/ @@ -306,9 +306,9 @@ export class Collection< options?: FindOptions, ): Promise> | null> { // Create document key, get document entry - const key = extendKey(this._keys.id, id) - const entry = await this.kv.get(key, options) - return await this.constructDocument(entry) + const key = extendKey(this._keys.id, id); + const entry = await this.kv.get(key, options); + return await this.constructDocument(entry); } /** @@ -333,21 +333,21 @@ export class Collection< options?: FindOptions, ): Promise> | null> { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create the index key const key = extendKey( this._keys.primaryIndex, index as KvId, compressed, - ) + ); // Get index entry - const entry = await this.kv.get(key, options) + const entry = await this.kv.get(key, options); // Return constructed document - return await this.constructDocument(entry) + return await this.constructDocument(entry); } /** @@ -382,22 +382,22 @@ export class Collection< >, ): Promise>>> { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create prefix key const prefixKey = extendKey( this._keys.secondaryIndex, index as KvId, compressed, - ) + ); // Add documents to result list by secondary index return await this.handleMany( prefixKey, (doc) => doc, options, - ) + ); } /** @@ -424,25 +424,25 @@ export class Collection< options?: FindManyOptions, ): Promise>[]> { // Create document keys, get document entries - const keys = ids.map((id) => extendKey(this._keys.id, id)) - const entries = await kvGetMany(keys, this.kv, options) + const keys = ids.map((id) => extendKey(this._keys.id, id)); + const entries = await kvGetMany(keys, this.kv, options); // Create empty result list - const result: Document>[] = [] + const result: Document>[] = []; // Loop over entries, add to result list for (const entry of entries) { - const doc = await this.constructDocument(entry) + const doc = await this.constructDocument(entry); if (!doc) { - continue + continue; } - result.push(doc) + result.push(doc); } // Return result list - return result + return result; } /** @@ -471,65 +471,65 @@ export class Collection< options?: ListOptions, ParseId>, ): Promise>> { // Initialize result list and create history key prefix - const result: HistoryEntry[] = [] - const keyPrefix = extendKey(this._keys.history, id) - const selector = createListSelector(keyPrefix, options) + const result: HistoryEntry[] = []; + const keyPrefix = extendKey(this._keys.history, id); + const selector = createListSelector(keyPrefix, options); // Create hsitory entries iterator - const listOptions = createListOptions(options) - const iter = this.kv.list(selector, listOptions) + const listOptions = createListOptions(options); + const iter = this.kv.list(selector, listOptions); // Collect history entries - let count = 0 - const offset = options?.offset ?? 0 + let count = 0; + const offset = options?.offset ?? 
0; for await (const { value, key } of iter) { // Skip by offset - count++ + count++; if (count <= offset) { - continue + continue; } // Cast history entry - let historyEntry = value as HistoryEntry + let historyEntry = value as HistoryEntry; // Handle serialized entries if (historyEntry.type === "write" && this._isSerialized) { - const { ids } = historyEntry.value as SerializedEntry - const timeId = getDocumentId(key as DenoKvStrictKey)! + const { ids } = historyEntry.value as SerializedEntry; + const timeId = getDocumentId(key as DenoKvStrictKey)!; const keys = ids.map((segmentId) => extendKey(this._keys.historySegment, id, timeId, segmentId) - ) + ); - const entries = await kvGetMany(keys, this.kv) + const entries = await kvGetMany(keys, this.kv); // Concatenate chunks - const data = concat(entries.map((entry) => entry.value as Uint8Array)) + const data = concat(entries.map((entry) => entry.value as Uint8Array)); // Decompress and deserialize - const serialized = await this._serializer.decompress(data) + const serialized = await this._serializer.decompress(data); const deserialized = await this._serializer.deserialize( serialized, - ) + ); // Set history entry historyEntry = { ...historyEntry, value: this._model.parse?.(deserialized), - } + }; } else if (historyEntry.type === "write") { // Set history entry historyEntry = { ...historyEntry, value: this._model.parse?.(historyEntry.value), - } + }; } // Filter and add history entry to result list - const filter = options?.filter + const filter = options?.filter; if (!filter || filter(historyEntry)) { - result.push(historyEntry) + result.push(historyEntry); } } @@ -537,7 +537,7 @@ export class Collection< return { result, cursor: iter.cursor || undefined, - } + }; } /** @@ -560,7 +560,7 @@ export class Collection< options?: SetOptions, ): Promise> | DenoKvCommitError> { // Set document value with generated id - return await this.setDocument(null, value, options) + return await this.setDocument(null, value, options); } /** @@ -586,7 +586,7 @@ export class Collection< data: TInput, options?: SetOptions, ): Promise> | DenoKvCommitError> { - return await this.setDocument(id, data, options) + return await this.setDocument(id, data, options); } /** @@ -602,7 +602,7 @@ export class Collection< * @returns A promise that resovles to void. 
*/ async delete(...ids: ParseId[]): Promise { - await this.deleteDocuments(ids, this._keepsHistory) + await this.deleteDocuments(ids, this._keepsHistory); } /** @@ -627,31 +627,31 @@ export class Collection< options?: FindOptions, ): Promise { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create index key const key = extendKey( this._keys.primaryIndex, index as KvId, compressed, - ) + ); // Get index entry - const result = await this.kv.get(key, options) + const result = await this.kv.get(key, options); // If no value, abort delete if (result.value === null || result.versionstamp === null) { - return + return; } // Extract document id from index entry const { __id__ } = result.value as & unknown - & Pick, "__id__"> + & Pick, "__id__">; // Delete document by id - await this.deleteDocuments([__id__], this._keepsHistory) + await this.deleteDocuments([__id__], this._keepsHistory); } /** @@ -684,25 +684,25 @@ export class Collection< >, ): Promise { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create prefix key const prefixKey = extendKey( this._keys.secondaryIndex, index as KvId, compressed, - ) + ); // Delete documents by secondary index, return iterator cursor const { cursor } = await this.handleMany( prefixKey, (doc) => this.deleteDocuments([doc.id], this._keepsHistory), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -739,17 +739,17 @@ export class Collection< options?: T, ): Promise> | DenoKvCommitError> { // Get document - const doc = await this.find(id) + const doc = await this.find(id); // If no document is found, return commit error if (!doc) { return { ok: false, - } + }; } // Update document and return commit result - return await this.updateDocument(doc, data, options) + return await this.updateDocument(doc, data, options); } /** @@ -788,17 +788,17 @@ export class Collection< options?: T, ): Promise> | DenoKvCommitError> { // Find document by primary index - const doc = await this.findByPrimaryIndex(index, value) + const doc = await this.findByPrimaryIndex(index, value); // If no document, return commit error if (!doc) { return { ok: false, - } + }; } // Update document, return result - return await this.updateDocument(doc, data, options) + return await this.updateDocument(doc, data, options); } /** @@ -847,22 +847,22 @@ export class Collection< > > { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create prefix key const prefixKey = extendKey( this._keys.secondaryIndex, index as KvId, compressed, - ) + ); // Update each document by secondary index, add commit result to result list return await this.handleMany( prefixKey, (doc) => this.updateDocument(doc, data, options), options, - ) + ); } /** @@ -902,17 +902,17 @@ export class Collection< >, options?: TUpsertOptions, ): Promise> | DenoKvCommitError> { - 
const updateCr = await this.update(input.id, input.update, options) + const updateCr = await this.update(input.id, input.update, options); if (updateCr.ok) { - return updateCr + return updateCr; } // Set new entry with given id return await this.set(input.id, input.set, { ...options, overwrite: false, - }) + }); } /** @@ -961,10 +961,10 @@ export class Collection< ...input.index, input.update, options, - ) + ); if (updateCr.ok) { - return updateCr + return updateCr; } // If id is present, set new entry with given id @@ -972,14 +972,14 @@ export class Collection< return await this.set(input.id, input.set, { ...options, overwrite: false, - }) + }); } // If no id, add new entry with generated id return await this.add(input.set, { ...options, overwrite: false, - }) + }); } /** @@ -1028,7 +1028,7 @@ export class Collection< this._keys.id, (doc) => this.updateDocument(doc, value, options), options, - ) + ); } /** @@ -1061,14 +1061,14 @@ export class Collection< > > { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Update each document by secondary index, add commit result to result list return await this.handleMany( prefixKey, (doc) => this.updateDocument(doc, data, options), options, - ) + ); } /** @@ -1107,12 +1107,12 @@ export class Collection< this._keys.id, (doc) => this.updateDocument(doc, data, options), { ...options, take: 1 }, - ) + ); // Return first result, or commit error object if not present return result.at(0) ?? { ok: false, - } + }; } /** @@ -1156,19 +1156,19 @@ export class Collection< index, value as KvValue, this, - ) + ); // Update a single document const { result } = await this.handleMany( prefixKey, (doc) => this.updateDocument(doc, data, options), { ...options, take: 1 }, - ) + ); // Return first result, or commit error object if not present return result.at(0) ?? { ok: false, - } + }; } /** @@ -1197,19 +1197,19 @@ export class Collection< options?: T, ): Promise> | DenoKvCommitError> { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Update a single document const { result } = await this.handleMany( prefixKey, (doc) => this.updateDocument(doc, data, options), { ...options, take: 1 }, - ) + ); // Return first result, or commit error object if not present return result.at(0) ?? 
{ ok: false, - } + }; } /** @@ -1243,35 +1243,35 @@ export class Collection< ): Promise { // Initiate result and error lists const results: - (CommitResult> | DenoKvCommitError)[] = [] - const errors: unknown[] = [] + (CommitResult> | DenoKvCommitError)[] = []; + const errors: unknown[] = []; // Add each value await allFulfilled( values.map(async (value) => { try { - const result = await this.add(value, options) - results.push(result) + const result = await this.add(value, options); + results.push(result); } catch (e) { - errors.push(e) + errors.push(e); } }), - ) + ); // Throw any caught errors if (errors.length > 0) { - throw errors + throw errors; } // If a commit has failed, return commit error if (!results.every((cr) => cr.ok)) { - return { ok: false } + return { ok: false }; } // Return commit result return { ok: true, - } + }; } /** @@ -1302,39 +1302,39 @@ export class Collection< // Perform quick delete if all documents are to be deleted if (selectsAll(options)) { // Create list iterator and empty keys list, init atomic operation - const iter = this.kv.list({ prefix: this._keys.base }, options) + const iter = this.kv.list({ prefix: this._keys.base }, options); - const keys: DenoKvStrictKey[] = [] - const atomic = new AtomicWrapper(this.kv) + const keys: DenoKvStrictKey[] = []; + const atomic = new AtomicWrapper(this.kv); // Collect all collection entry keys for await (const { key } of iter) { - keys.push(key as DenoKvStrictKey) + keys.push(key as DenoKvStrictKey); } // Set history entries if keeps history if (this._keepsHistory) { for await (const { key } of this.kv.list({ prefix: this._keys.id })) { - const id = getDocumentId(key as DenoKvStrictKey) + const id = getDocumentId(key as DenoKvStrictKey); if (!id) { - continue + continue; } - const historyKey = extendKey(this._keys.history, id, ulid()) + const historyKey = extendKey(this._keys.history, id, ulid()); const historyEntry: HistoryEntry = { type: "delete", timestamp: new Date(), - } + }; - atomic.set(historyKey, historyEntry) + atomic.set(historyKey, historyEntry); } } // Delete all keys and return - keys.forEach((key) => atomic.delete(key)) - await atomic.commit() + keys.forEach((key) => atomic.delete(key)); + await atomic.commit(); } // Execute delete operation for each document entry @@ -1342,10 +1342,10 @@ export class Collection< this._keys.id, (doc) => this.deleteDocuments([doc.id], this._keepsHistory), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -1375,17 +1375,17 @@ export class Collection< >, ): Promise { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Delete documents by secondary index, return iterator cursor const { cursor } = await this.handleMany( prefixKey, (doc) => this.deleteDocuments([doc.id], this._keepsHistory), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -1418,7 +1418,7 @@ export class Collection< this._keys.id, (doc) => doc, options, - ) + ); } /** @@ -1451,12 +1451,12 @@ export class Collection< ParseId >, ): Promise>>> { - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); return await this.handleMany( prefixKey, (doc) => doc, options, - ) + ); } /** @@ -1492,10 +1492,10 @@ export class Collection< this._keys.id, (doc) => doc, { ...options, take: 1 }, - ) + ); // Return first result item, or 
null if not present - return result.at(0) ?? null + return result.at(0) ?? null; } /** @@ -1537,17 +1537,17 @@ export class Collection< index, value as KvValue, this, - ) + ); // Get result list with one item const { result } = await this.handleMany( prefixKey, (doc) => doc, { ...options, take: 1 }, - ) + ); // Return first result item, or null if not present - return result.at(0) ?? null + return result.at(0) ?? null; } /** @@ -1575,17 +1575,17 @@ export class Collection< >, ): Promise> | null> { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Get result list with one item const { result } = await this.handleMany( prefixKey, (doc) => doc, { ...options, take: 1 }, - ) + ); // Return first result item, or null if not present - return result.at(0) ?? null + return result.at(0) ?? null; } /** @@ -1620,10 +1620,10 @@ export class Collection< this._keys.id, async (doc) => await fn(doc), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -1663,17 +1663,17 @@ export class Collection< index, value as KvValue, this, - ) + ); // Execute callback function for each document entry const { cursor } = await this.handleMany( prefixKey, (doc) => fn(doc), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -1706,17 +1706,17 @@ export class Collection< >, ): Promise { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Execute callback function for each document entry const { cursor } = await this.handleMany( prefixKey, (doc) => fn(doc), options, - ) + ); // Return iterator cursor - return { cursor } + return { cursor }; } /** @@ -1753,7 +1753,7 @@ export class Collection< this._keys.id, (doc) => fn(doc), options, - ) + ); } /** @@ -1792,22 +1792,22 @@ export class Collection< >, ): Promise>> { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create prefix key const prefixKey = extendKey( this._keys.secondaryIndex, index as KvId, compressed, - ) + ); // Execute callback function for each document entry, return result and cursor return await this.handleMany( prefixKey, (doc) => fn(doc), options, - ) + ); } /** @@ -1843,14 +1843,14 @@ export class Collection< >, ): Promise>> { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Execute callback function for each document entry, return result and cursor return await this.handleMany( prefixKey, (doc) => fn(doc), options, - ) + ); } /** @@ -1877,20 +1877,20 @@ export class Collection< >, ): Promise { // Initiate count result - let result = 0 + let result = 0; // Perform efficient count if counting all document entries if (selectsAll(options)) { - const iter = this.kv.list({ prefix: this._keys.id }, options) + const iter = this.kv.list({ prefix: this._keys.id }, options); for await (const _ of iter) { - result++ + result++; } - return result + return result; } // Perform count using many documents handler - await this.handleMany(this._keys.id, () => result++, options) - return result + await 
this.handleMany(this._keys.id, () => result++, options); + return result; } /** @@ -1919,28 +1919,28 @@ export class Collection< >, ): Promise { // Serialize and compress index value - const serialized = await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + const serialized = await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Create prefix key const prefixKey = extendKey( this._keys.secondaryIndex, index as KvId, compressed, - ) + ); // Initialize count result - let result = 0 + let result = 0; // Update each document by secondary index, add commit result to result list await this.handleMany( prefixKey, () => result++, options, - ) + ); // Return count result - return result + return result; } /** @@ -1970,20 +1970,20 @@ export class Collection< >, ): Promise { // Create prefix key - const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId) + const prefixKey = extendKey(this._keys.secondaryIndex, order as KvId); // Initialize count result - let result = 0 + let result = 0; // Update each document by secondary index, add commit result to result list await this.handleMany( prefixKey, () => result++, options, - ) + ); // Return count result - return result + return result; } /** @@ -2018,10 +2018,10 @@ export class Collection< this._keys.undelivered, data, options, - ) + ); // Enqueue message with options - return await this.kv.enqueue(prep.msg, prep.options) + return await this.kv.enqueue(prep.msg, prep.options); } /** @@ -2056,15 +2056,15 @@ export class Collection< options?: QueueListenerOptions, ): Promise { // Create handler id - const handlerId = createHandlerId(this._keys.base, options?.topic) + const handlerId = createHandlerId(this._keys.base, options?.topic); // Add new handler to specified handlers - const handlers = this.queueHandlers.get(handlerId) ?? [] - handlers.push(handler as QueueMessageHandler) - this.queueHandlers.set(handlerId, handlers) + const handlers = this.queueHandlers.get(handlerId) ?? 
[]; + handlers.push(handler as QueueMessageHandler); + this.queueHandlers.set(handlerId, handlers); // Activate idempotent listener - return await this.idempotentListener() + return await this.idempotentListener(); } /** @@ -2088,12 +2088,12 @@ export class Collection< options?: FindOptions, ): Promise | null> { // Create document key, get document entry - const key = extendKey(this._keys.undelivered, id) - const result = await this.kv.get(key, options) + const key = extendKey(this._keys.undelivered, id); + const result = await this.kv.get(key, options); // If no entry exists, return null if (result.value === null || result.versionstamp === null) { - return null + return null; } // Return document @@ -2101,7 +2101,7 @@ export class Collection< id, versionstamp: result.versionstamp, value: result.value as T, - }) + }); } /** @@ -2116,24 +2116,26 @@ export class Collection< */ async deleteHistory(id: ParseId): Promise { // Initialize atomic operation and create iterators - const atomic = new AtomicWrapper(this.kv) - const historyKeyPrefix = extendKey(this._keys.history, id) - const historySegmentKeyPrefix = extendKey(this._keys.historySegment, id) - const historyIter = this.kv.list({ prefix: historyKeyPrefix }) - const historySegmentIter = this.kv.list({ prefix: historySegmentKeyPrefix }) + const atomic = new AtomicWrapper(this.kv); + const historyKeyPrefix = extendKey(this._keys.history, id); + const historySegmentKeyPrefix = extendKey(this._keys.historySegment, id); + const historyIter = this.kv.list({ prefix: historyKeyPrefix }); + const historySegmentIter = this.kv.list({ + prefix: historySegmentKeyPrefix, + }); // Delete history entries for await (const { key } of historyIter) { - atomic.delete(key as DenoKvStrictKey) + atomic.delete(key as DenoKvStrictKey); } // Delete any history segment entries for await (const { key } of historySegmentIter) { - atomic.delete(key as DenoKvStrictKey) + atomic.delete(key as DenoKvStrictKey); } // Commit atomic operation - await atomic.commit() + await atomic.commit(); } /** @@ -2147,8 +2149,8 @@ export class Collection< * @param id - Id of undelivered document. 
*/ async deleteUndelivered(id: KvId): Promise { - const key = extendKey(this._keys.undelivered, id) - await this.kv.delete(key) + const key = extendKey(this._keys.undelivered, id); + await this.kv.delete(key); } /** @@ -2185,24 +2187,24 @@ export class Collection< fn: (doc: Document> | null) => unknown, options?: WatchOptions, ): { - promise: Promise - cancel: () => Promise + promise: Promise; + cancel: () => Promise; } { - const key = extendKey(this._keys.id, id) + const key = extendKey(this._keys.id, id); return createWatcher(this.kv, options, [key], async (entries) => { - const entry = entries.at(0) + const entry = entries.at(0); // If no entry is found, invoke callback function with null if (!entry) { - await fn(null) - return + await fn(null); + return; } // Construct document and invoke callback function - const doc = await this.constructDocument(entry) - await fn(doc) - }) + const doc = await this.constructDocument(entry); + await fn(doc); + }); } /** @@ -2246,20 +2248,20 @@ export class Collection< fn: (doc: (Document> | null)[]) => unknown, options?: WatchOptions, ): { - promise: Promise - cancel: () => Promise + promise: Promise; + cancel: () => Promise; } { - const keys = ids.map((id) => extendKey(this._keys.id, id)) + const keys = ids.map((id) => extendKey(this._keys.id, id)); return createWatcher(this.kv, options, keys, async (entries) => { // Construct documents const docs = await Array.fromAsync( entries.map((entry) => this.constructDocument(entry)), - ) + ); // Invoke callback function - await fn(docs) - }) + await fn(docs); + }); } /***********************/ @@ -2284,11 +2286,11 @@ export class Collection< ): Promise> | DenoKvCommitError> { // Create id, document key and parse document value const parsed = this._model._transform?.(value as TInput) ?? - this._model.parse(value) + this._model.parse(value); - const docId = id ?? await this._idGenerator(parsed) - const idKey = extendKey(this._keys.id, docId) - return await this.setDoc(docId, idKey, parsed, options) + const docId = id ?? await this._idGenerator(parsed); + const idKey = extendKey(this._keys.id, docId); + return await this.setDoc(docId, idKey, parsed, options); } /** @@ -2308,36 +2310,36 @@ export class Collection< options: SetOptions | undefined, ): Promise> | DenoKvCommitError> { // Initialize atomic operation and keys list - const ids: KvId[] = [] - let docValue: any = value - const isUint8Array = value instanceof Uint8Array - const timeId = ulid() - const operationPool = new AtomicPool() - const indexOperationPool = new AtomicPool() + const ids: KvId[] = []; + let docValue: any = value; + const isUint8Array = value instanceof Uint8Array; + const timeId = ulid(); + const operationPool = new AtomicPool(); + const indexOperationPool = new AtomicPool(); // Check for id collision if (!options?.overwrite) { operationPool.check({ key: idKey, versionstamp: null, - }) + }); } // Serialize if enabled if (this._isSerialized) { const serialized = isUint8Array ? 
value - : await this._serializer.serialize(value) - const compressed = await this._serializer.compress(serialized) + : await this._serializer.serialize(value); + const compressed = await this._serializer.compress(serialized); // Set segmented entries - let index = 0 + let index = 0; for (let i = 0; i < compressed.length; i += UINT8ARRAY_LENGTH_LIMIT) { - const part = compressed.subarray(i, i + UINT8ARRAY_LENGTH_LIMIT) - const key = extendKey(this._keys.segment, docId, index) - ids.push(index) + const part = compressed.subarray(i, i + UINT8ARRAY_LENGTH_LIMIT); + const key = extendKey(this._keys.segment, docId, index); + ids.push(index); - operationPool.set(key, part, options) + operationPool.set(key, part, options); // Set history segments if keeps history if (this._keepsHistory) { @@ -2346,37 +2348,37 @@ export class Collection< docId, timeId, index, - ) + ); - operationPool.set(historySegmentKey, part) + operationPool.set(historySegmentKey, part); } - index++ + index++; } // Set serialized document value const serializedEntry: SerializedEntry = { ids, isUint8Array, - } + }; - docValue = serializedEntry + docValue = serializedEntry; } // Set document entry - operationPool.set(idKey, docValue, options) + operationPool.set(idKey, docValue, options); // Set history entry if keeps history if (this._keepsHistory) { - const historyKey = extendKey(this._keys.history, docId, timeId) + const historyKey = extendKey(this._keys.history, docId, timeId); const historyEntry: HistoryEntry = { type: "write", timestamp: new Date(), value: docValue, - } + }; - operationPool.set(historyKey, historyEntry) + operationPool.set(historyKey, historyEntry); } // Set indices if is indexable @@ -2388,51 +2390,51 @@ export class Collection< indexOperationPool, this, options, - ) + ); } // Initialize index check, commit result and atomic operation - let indexCheck = false - let cr: DenoKvCommitResult | DenoKvCommitError = { ok: false } + let indexCheck = false; + let cr: DenoKvCommitResult | DenoKvCommitError = { ok: false }; const atomic = options?.batched ? 
new AtomicWrapper(this.kv) - : this.kv.atomic() + : this.kv.atomic(); // Perform index mutations first if operation is batched, else bind all mutations to main operation if (options?.batched) { - const indexAtomic = this.kv.atomic() - indexOperationPool.bindTo(indexAtomic) - const indexCr = await indexAtomic.commit() - indexCheck = indexCr.ok + const indexAtomic = this.kv.atomic(); + indexOperationPool.bindTo(indexAtomic); + const indexCr = await indexAtomic.commit(); + indexCheck = indexCr.ok; } else { - indexOperationPool.bindTo(atomic) + indexOperationPool.bindTo(atomic); } // Bind remaining mutations to main operation - operationPool.bindTo(atomic) + operationPool.bindTo(atomic); // Commit operation if not batched or if index setters completed successfully if (!options?.batched || indexCheck) { - cr = await atomic.commit() + cr = await atomic.commit(); } // Handle failed operation if (!cr.ok) { // Delete any entries upon failed batched operation if (options?.batched && indexCheck) { - const failedAtomic = new AtomicWrapper(this.kv) + const failedAtomic = new AtomicWrapper(this.kv); if (this._keepsHistory) { - const historyKey = extendKey(this._keys.history, docId, timeId) - failedAtomic.delete(historyKey) + const historyKey = extendKey(this._keys.history, docId, timeId); + failedAtomic.delete(historyKey); } if (this._isSerialized) { - const { ids } = docValue as SerializedEntry + const { ids } = docValue as SerializedEntry; ids.forEach((id) => failedAtomic.delete(extendKey(this._keys.segment, docId, id)) - ) + ); } if (this._isIndexable) { @@ -2441,32 +2443,32 @@ export class Collection< value as KvObject, failedAtomic, this, - ) + ); } - await failedAtomic.commit() + await failedAtomic.commit(); } // Return commit error if no remaining retry attempts - const retry = options?.retry ?? 0 + const retry = options?.retry ?? 0; if (!retry) { return { ok: false, - } + }; } // Retry operation and decrement retry count return await this.setDoc(docId, idKey, value, { ...options, retry: retry - 1, - }) + }); } // Return commit result return { ...cr, id: docId, - } + }; } /** @@ -2483,50 +2485,50 @@ export class Collection< options: UpdateOptions | undefined, ): Promise> | DenoKvCommitError> { // Get document value, delete document entry - const { value, id } = doc + const { value, id } = doc; // If indexable, check for index collisions and delete existing index entries if (this._isIndexable) { - const atomic = this.kv.atomic() + const atomic = this.kv.atomic(); await checkIndices( data as KvObject, atomic, this, - ) + ); await deleteIndices( id, doc.value as KvObject, atomic, this, - ) + ); - const cr = await atomic.commit() + const cr = await atomic.commit(); if (!cr.ok) { return { ok: false, - } + }; } } // If serialized, delete existing segment entries if (this._isSerialized) { - const atomic = new AtomicWrapper(this.kv) - const keyPrefix = extendKey(this._keys.segment, id) - const iter = this.kv.list({ prefix: keyPrefix }) + const atomic = new AtomicWrapper(this.kv); + const keyPrefix = extendKey(this._keys.segment, id); + const iter = this.kv.list({ prefix: keyPrefix }); for await (const { key } of iter) { - atomic.delete(key as DenoKvStrictKey) + atomic.delete(key as DenoKvStrictKey); } - await atomic.commit() + await atomic.commit(); } // Determine update strategy and check value type - const strategy = options?.strategy ?? 
DEFAULT_UPDATE_STRATEGY; + const isObject = isKvObject(value); // Handle different update strategies const updated = strategy === "replace" @@ -2536,10 +2538,10 @@ export class Collection< ...value as KvObject, ...data as KvObject, } - : deepMerge({ value }, { value: data }, options?.mergeOptions).value + : deepMerge({ value }, { value: data }, options?.mergeOptions).value; // Parse updated value - const parsed = this._model.parse(updated as any) + const parsed = this._model.parse(updated as any); // Set new document value return await this.setDoc( @@ -2550,7 +2552,7 @@ export class Collection< ...options, overwrite: true, }, - ) + ); } /** @@ -2563,52 +2565,52 @@ export class Collection< { key, value, versionstamp }: DenoKvEntryMaybe, ) { if (!versionstamp) { - return null + return null; } - const indexedDocId = (value as IndexDataEntry)?.__id__ + const indexedDocId = (value as IndexDataEntry)?.__id__; const docId = indexedDocId ?? - getDocumentId(key as DenoKvStrictKey) + getDocumentId(key as DenoKvStrictKey); if (!docId) { - return null + return null; } if (this._isSerialized) { // Get document parts - const { ids, isUint8Array } = value as SerializedEntry + const { ids, isUint8Array } = value as SerializedEntry; const keys = ids.map((segId) => extendKey(this._keys.segment, docId, segId) - ) + ); - const docEntries = await kvGetMany(keys, this.kv) + const docEntries = await kvGetMany(keys, this.kv); // Concatenate chunks - const data = concat(docEntries.map((entry) => entry.value as Uint8Array)) + const data = concat(docEntries.map((entry) => entry.value as Uint8Array)); // Decompress and deserialize - const serialized = await this._serializer.decompress(data) + const serialized = await this._serializer.decompress(data); const deserialized = isUint8Array ? serialized as TOutput - : await this._serializer.deserialize(serialized) + : await this._serializer.deserialize(serialized); // Return parsed document return new Document>(this._model, { id: docId as ParseId, value: deserialized, versionstamp, - }) + }); } // Remove id from value and return parsed document if indexed entry if (typeof indexedDocId !== "undefined") { - const { __id__, ...val } = value as any + const { __id__, ...val } = value as any; return new Document>(this._model, { id: docId as ParseId, value: val as TOutput, versionstamp, - }) + }); } // Return parsed document @@ -2616,7 +2618,7 @@ export class Collection< id: docId as ParseId, value: value as TOutput, versionstamp, - }) + }); } /** @@ -2635,67 +2637,67 @@ export class Collection< | undefined, ) { // Create list iterator with given options - const selector = createListSelector(prefixKey, options) - const listOptions = createListOptions(options) - const iter = this.kv.list(selector, listOptions) + const selector = createListSelector(prefixKey, options); + const listOptions = createListOptions(options); + const iter = this.kv.list(selector, listOptions); // Initiate lists - const docs: Document>[] = [] - const result: Awaited[] = [] - const errors: unknown[] = [] - const take = options?.take + const docs: Document>[] = []; + const result: Awaited[] = []; + const errors: unknown[] = []; + const take = options?.take; // Loop over each document entry - let count = -1 - const offset = options?.offset ?? 0 + let count = -1; + const offset = options?.offset ?? 
0; for await (const entry of iter) { // Increment count - count++ + count++; // Skip by offset if (count < offset) { - continue + continue; } // Check if result limit is reached if (take && docs.length >= take) { - break + break; } // Construct document from entry - const doc = await this.constructDocument(entry) + const doc = await this.constructDocument(entry); // Continue if document is not constructed if (!doc) { - continue + continue; } // Filter document and add to documents list - const filter = options?.filter + const filter = options?.filter; if (!filter || filter(doc)) { - docs.push(doc) + docs.push(doc); } } // Execute callback function for each document await allFulfilled(docs.map(async (doc) => { try { - result.push(await fn(doc)) + result.push(await fn(doc)); } catch (e) { - errors.push(e) + errors.push(e); } - })) + })); // Throw any caught errors if (errors.length > 0) { - throw errors + throw errors; } // Return result and current iterator cursor return { result, cursor: iter.cursor || undefined, - } + }; } /** @@ -2707,102 +2709,102 @@ export class Collection< */ private async deleteDocuments(ids: KvId[], recordHistory: boolean) { // Initialize atomic operation - const atomic = new AtomicWrapper(this.kv) + const atomic = new AtomicWrapper(this.kv); // Set delete history entry if recordHistory is true if (recordHistory) { ids.forEach((id) => { - const historyKey = extendKey(this._keys.history, id, ulid()) + const historyKey = extendKey(this._keys.history, id, ulid()); const historyEntry: HistoryEntry = { type: "delete", timestamp: new Date(), - } + }; - atomic.set(historyKey, historyEntry) - }) + atomic.set(historyKey, historyEntry); + }); } if (this._isIndexable && this._isSerialized) { // Run delete operations for each id await allFulfilled(ids.map(async (id) => { // Create document id key, get entry and construct document - const idKey = extendKey(this._keys.id, id) - const entry = await this.kv.get(idKey) - const doc = await this.constructDocument(entry) + const idKey = extendKey(this._keys.id, id); + const entry = await this.kv.get(idKey); + const doc = await this.constructDocument(entry); // Delete document entries - atomic.delete(idKey) + atomic.delete(idKey); if (entry.value) { const keys = (entry.value as SerializedEntry).ids.map((segId) => extendKey(this._keys.segment, id, segId) - ) + ); - keys.forEach((key) => atomic.delete(key)) + keys.forEach((key) => atomic.delete(key)); } if (doc) { - await deleteIndices(id, doc.value as KvObject, atomic, this) + await deleteIndices(id, doc.value as KvObject, atomic, this); } - })) + })); // Commit the operation - await atomic.commit() - return + await atomic.commit(); + return; } if (this._isIndexable) { // Run delete operations for each id await allFulfilled(ids.map(async (id) => { // Create idKey, get document value - const idKey = extendKey(this._keys.id, id) - const { value } = await this.kv.get(idKey) + const idKey = extendKey(this._keys.id, id); + const { value } = await this.kv.get(idKey); // If no value, abort delete if (!value) { - return + return; } // Delete document entries - atomic.delete(idKey) - await deleteIndices(id, value as KvObject, atomic, this) - })) + atomic.delete(idKey); + await deleteIndices(id, value as KvObject, atomic, this); + })); // Commit the operation - await atomic.commit() - return + await atomic.commit(); + return; } if (this._isSerialized) { // Perform delete for each id await allFulfilled(ids.map(async (id) => { // Create document id key, get document value - const idKey = 
extendKey(this._keys.id, id) - const { value } = await this.kv.get(idKey) + const idKey = extendKey(this._keys.id, id); + const { value } = await this.kv.get(idKey); // If no value, abort delete if (!value) { - return + return; } // Delete document entries - atomic.delete(idKey) + atomic.delete(idKey); const keys = (value as SerializedEntry).ids.map((segId) => extendKey(this._keys.segment, id, segId) - ) + ); - keys.forEach((key) => atomic.delete(key)) - })) + keys.forEach((key) => atomic.delete(key)); + })); // Commit the operation - await atomic.commit() - return + await atomic.commit(); + return; } // Perform delete for each id and commit the operation - ids.forEach((id) => atomic.delete(extendKey(this._keys.id, id))) - await atomic.commit() + ids.forEach((id) => atomic.delete(extendKey(this._keys.id, id))); + await atomic.commit(); } } diff --git a/src/constants.ts b/src/constants.ts index e167e6f..a39186b 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -1,43 +1,43 @@ -import type { UpdateStrategy } from "./types.ts" +import type { UpdateStrategy } from "./types.ts"; // Key prefixes -export const KVDEX_KEY_PREFIX = "__kvdex__" +export const KVDEX_KEY_PREFIX = "__kvdex__"; -export const ID_KEY_PREFIX = "__id__" +export const ID_KEY_PREFIX = "__id__"; -export const PRIMARY_INDEX_KEY_PREFIX = "__index_primary__" +export const PRIMARY_INDEX_KEY_PREFIX = "__index_primary__"; -export const SECONDARY_INDEX_KEY_PREFIX = "__index_secondary__" +export const SECONDARY_INDEX_KEY_PREFIX = "__index_secondary__"; -export const SEGMENT_KEY_PREFIX = "__segment__" +export const SEGMENT_KEY_PREFIX = "__segment__"; -export const UNDELIVERED_KEY_PREFIX = "__undelivered__" +export const UNDELIVERED_KEY_PREFIX = "__undelivered__"; -export const HISTORY_KEY_PREFIX = "__history__" +export const HISTORY_KEY_PREFIX = "__history__"; // Fixed limits -export const ATOMIC_OPERATION_MUTATION_LIMIT = 1_000 +export const ATOMIC_OPERATION_MUTATION_LIMIT = 1_000; -export const ATOMIC_OPERATION_SIZE_LIMIT = 800 +export const ATOMIC_OPERATION_SIZE_LIMIT = 800; -export const ATOMIC_OPERATION_KEY_SIZE_LIMIT = 80 +export const ATOMIC_OPERATION_KEY_SIZE_LIMIT = 80; -export const ATOMIC_OPERTION_CHECK_LIMIT = 10 +export const ATOMIC_OPERTION_CHECK_LIMIT = 10; -export const GET_MANY_KEY_LIMIT = 10 +export const GET_MANY_KEY_LIMIT = 10; -export const UINT8ARRAY_LENGTH_LIMIT = 65_536 +export const UINT8ARRAY_LENGTH_LIMIT = 65_536; // Defaults -export const DEFAULT_INTERVAL_RETRY = 10 +export const DEFAULT_INTERVAL_RETRY = 10; -export const DEFAULT_LOOP_RETRY = 10 +export const DEFAULT_LOOP_RETRY = 10; -export const DEFAULT_UPDATE_STRATEGY: UpdateStrategy = "merge" +export const DEFAULT_UPDATE_STRATEGY: UpdateStrategy = "merge"; // Misc -export const COMPRESSION_QUALITY_LEVEL = 1 +export const COMPRESSION_QUALITY_LEVEL = 1; -export const MIN_INTERVAL_START_DELAY = 1_000 +export const MIN_INTERVAL_START_DELAY = 1_000; -export const MIN_LOOP_START_DELAY = 1_000 +export const MIN_LOOP_START_DELAY = 1_000; diff --git a/src/deps.ts b/src/deps.ts index eb610f2..df330e6 100644 --- a/src/deps.ts +++ b/src/deps.ts @@ -1,8 +1,8 @@ -export { brotliCompressSync, brotliDecompressSync, constants } from "node:zlib" -export { ulid } from "jsr:@std/ulid@^1.0.0" -export { concat } from "jsr:@std/bytes@^1.0.1/concat" +export { brotliCompressSync, brotliDecompressSync, constants } from "node:zlib"; +export { ulid } from "jsr:@std/ulid@^1.0.0"; +export { concat } from "jsr:@std/bytes@^1.0.1/concat"; export { deepMerge, type DeepMergeOptions, -} 
from "jsr:@std/collections@^1.0.2/deep-merge" -export { deserialize, serialize } from "node:v8" +} from "jsr:@std/collections@^1.0.2/deep-merge"; +export { deserialize, serialize } from "node:v8"; diff --git a/src/document.ts b/src/document.ts index 3a36288..eff0bc0 100644 --- a/src/document.ts +++ b/src/document.ts @@ -5,22 +5,22 @@ import type { KvObject, KvValue, Model, -} from "./types.ts" -import { isKvObject } from "./utils.ts" +} from "./types.ts"; +import { isKvObject } from "./utils.ts"; /** Represents a database entry with id, versionstamp and value. */ export class Document { - readonly id: TId - readonly versionstamp: string - readonly value: TOutput + readonly id: TId; + readonly versionstamp: string; + readonly value: TOutput; constructor( model: Model, { id, versionstamp, value }: DocumentData, ) { - this.id = id - this.versionstamp = versionstamp - this.value = model.parse(value) + this.id = id; + this.versionstamp = versionstamp; + this.value = model.parse(value); } /** @@ -58,13 +58,13 @@ export class Document { id: this.id, versionstamp: this.versionstamp, ...this.value as KvObject, - } as unknown as FlatDocumentData + } as unknown as FlatDocumentData; } return { id: this.id, versionstamp: this.versionstamp, value: this.value, - } as unknown as FlatDocumentData + } as unknown as FlatDocumentData; } } diff --git a/src/errors.ts b/src/errors.ts index ee27056..a6b3416 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -1,6 +1,6 @@ /** Error representing the use, selection or creation of an invalid collection in a given context. */ export class InvalidCollectionError extends Error { constructor(message?: string, options?: ErrorOptions) { - super(message, options) + super(message, options); } } diff --git a/src/ext/kv/atomic.ts b/src/ext/kv/atomic.ts index a2d94d0..2a8fcd8 100644 --- a/src/ext/kv/atomic.ts +++ b/src/ext/kv/atomic.ts @@ -6,19 +6,19 @@ import type { DenoKvEnqueueOptions, DenoKvSetOptions, DenoKvStrictKey, -} from "../../types.ts" -import type { MapKv } from "./map_kv.ts" -import { createVersionstamp } from "./utils.ts" +} from "../../types.ts"; +import type { MapKv } from "./map_kv.ts"; +import { createVersionstamp } from "./utils.ts"; export class MapKvAtomicOperation implements DenoAtomicOperation { - private kv: MapKv - private checks: (() => boolean)[] - private ops: ((versionstamp: string) => void)[] + private kv: MapKv; + private checks: (() => boolean)[]; + private ops: ((versionstamp: string) => void)[]; constructor(kv: MapKv) { - this.kv = kv - this.checks = [] - this.ops = [] + this.kv = kv; + this.checks = []; + this.ops = []; } set( @@ -28,115 +28,115 @@ export class MapKvAtomicOperation implements DenoAtomicOperation { ): DenoAtomicOperation { this.ops.push((versionstamp) => this.kv._set(key, value, versionstamp, options) - ) - return this + ); + return this; } delete(key: DenoKvStrictKey): DenoAtomicOperation { - this.ops.push(() => this.kv.delete(key)) - return this + this.ops.push(() => this.kv.delete(key)); + return this; } min(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation { this.ops.push((versionstamp) => { - const { value } = this.kv.get(key) + const { value } = this.kv.get(key); if (!value) { - this.kv._set(key, { value: n }, versionstamp) - return + this.kv._set(key, { value: n }, versionstamp); + return; } - const val = (value as any).value + const val = (value as any).value; if (typeof val !== "bigint") { - throw new Error("Min operation can only be performed on KvU64 value") + throw new Error("Min operation can only be performed 
on KvU64 value"); } this.kv._set(key, { value: n < val ? n : val, - }, versionstamp) - }) + }, versionstamp); + }); - return this + return this; } max(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation { this.ops.push((versionstamp) => { - const { value } = this.kv.get(key) + const { value } = this.kv.get(key); if (!value) { - this.kv._set(key, { value: n }, versionstamp) - return + this.kv._set(key, { value: n }, versionstamp); + return; } - const val = (value as any).value + const val = (value as any).value; if (typeof val !== "bigint") { - throw new Error("Max operation can only be performed on KvU64 value") + throw new Error("Max operation can only be performed on KvU64 value"); } this.kv._set(key, { value: n > val ? n : val, - }, versionstamp) - }) + }, versionstamp); + }); - return this + return this; } sum(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation { this.ops.push((versionstamp) => { - const { value } = this.kv.get(key) + const { value } = this.kv.get(key); if (!value) { - this.kv._set(key, { value: n }, versionstamp) - return + this.kv._set(key, { value: n }, versionstamp); + return; } - const val = (value as any).value + const val = (value as any).value; if (typeof val !== "bigint") { - throw new Error("Sum operation can only be performed on KvU64 value") + throw new Error("Sum operation can only be performed on KvU64 value"); } this.kv._set(key, { value: n + val, - }, versionstamp) - }) + }, versionstamp); + }); - return this + return this; } check(...checks: DenoAtomicCheck[]): DenoAtomicOperation { checks.forEach(({ key, versionstamp }) => { this.checks.push(() => { - const entry = this.kv.get(key) - return entry.versionstamp === versionstamp - }) - }) + const entry = this.kv.get(key); + return entry.versionstamp === versionstamp; + }); + }); - return this + return this; } enqueue(value: unknown, options?: DenoKvEnqueueOptions): DenoAtomicOperation { this.ops.push((versionstamp) => { - this.kv._enqueue(value, versionstamp, options) - }) + this.kv._enqueue(value, versionstamp, options); + }); - return this + return this; } commit(): DenoKvCommitError | DenoKvCommitResult { const passedChecks = this.checks .map((check) => check()) - .every((check) => check) + .every((check) => check); if (!passedChecks) { return { ok: false, - } + }; } - const versionstamp = createVersionstamp() + const versionstamp = createVersionstamp(); - this.ops.forEach((op) => op(versionstamp)) + this.ops.forEach((op) => op(versionstamp)); return { ok: true, versionstamp, - } + }; } } diff --git a/src/ext/kv/map_kv.ts b/src/ext/kv/map_kv.ts index 9f8fb55..a0be60c 100644 --- a/src/ext/kv/map_kv.ts +++ b/src/ext/kv/map_kv.ts @@ -12,12 +12,12 @@ import type { DenoKvSetOptions, DenoKvStrictKey, DenoKvWatchOptions, -} from "../../types.ts" -import { jsonParse, jsonStringify } from "../../utils.ts" -import { MapKvAtomicOperation } from "./atomic.ts" -import { Watcher } from "./watcher.ts" -import { createVersionstamp, keySort } from "./utils.ts" -import type { BasicMap, MapKvOptions } from "./types.ts" +} from "../../types.ts"; +import { jsonParse, jsonStringify } from "../../utils.ts"; +import { MapKvAtomicOperation } from "./atomic.ts"; +import { Watcher } from "./watcher.ts"; +import { createVersionstamp, keySort } from "./utils.ts"; +import type { BasicMap, MapKvOptions } from "./types.ts"; /** * KV instance utilising a `BasicMap` as its backend. 
@@ -45,57 +45,57 @@ import type { BasicMap, MapKvOptions } from "./types.ts" * ``` */ export class MapKv implements DenoKv { - private map: BasicMap> - private clearOnClose: boolean - private watchers: Watcher[] - private listenHandlers: ((msg: unknown) => unknown)[] + private map: BasicMap>; + private clearOnClose: boolean; + private watchers: Watcher[]; + private listenHandlers: ((msg: unknown) => unknown)[]; private listener: | { - promise: Promise - resolve: () => void + promise: Promise; + resolve: () => void; } - | undefined + | undefined; constructor({ map = new Map(), entries, clearOnClose = false, }: MapKvOptions = {}) { - this.map = map - this.clearOnClose = clearOnClose - this.watchers = [] - this.listenHandlers = [] + this.map = map; + this.clearOnClose = clearOnClose; + this.watchers = []; + this.listenHandlers = []; entries?.forEach(({ key, ...data }) => this.map.set(jsonStringify(key), data) - ) + ); } close(): void { - this.watchers.forEach((w) => w.cancel()) - this.listener?.resolve() - if (this.clearOnClose) this.map.clear() + this.watchers.forEach((w) => w.cancel()); + this.listener?.resolve(); + if (this.clearOnClose) this.map.clear(); } delete(key: DenoKvStrictKey) { - this.map.delete(jsonStringify(key)) - this.watchers.forEach((w) => w.update(key)) + this.map.delete(jsonStringify(key)); + this.watchers.forEach((w) => w.update(key)); } get(key: DenoKvStrictKey): DenoKvEntryMaybe { const data = this.map.get(jsonStringify(key)) ?? { value: null, versionstamp: null, - } + }; return { ...data, key: key as DenoKvLaxKey, - } + }; } getMany(keys: DenoKvStrictKey[]): DenoKvEntryMaybe[] { - return keys.map((key) => this.get(key)) + return keys.map((key) => this.get(key)); } set( @@ -103,116 +103,116 @@ export class MapKv implements DenoKv { value: unknown, options?: DenoKvSetOptions, ): DenoKvCommitResult { - return this._set(key, value, createVersionstamp(), options) + return this._set(key, value, createVersionstamp(), options); } list( selector: DenoKvListSelector, options?: DenoKvListOptions, ): DenoKvListIterator { - let entries = Array.from(this.map.entries()) - const start = (selector as any).start as DenoKvStrictKey | undefined - const end = (selector as any).end as DenoKvStrictKey | undefined - const prefix = (selector as any).prefix as DenoKvStrictKey | undefined + let entries = Array.from(this.map.entries()); + const start = (selector as any).start as DenoKvStrictKey | undefined; + const end = (selector as any).end as DenoKvStrictKey | undefined; + const prefix = (selector as any).prefix as DenoKvStrictKey | undefined; entries.sort(([k1], [k2]) => { - const key1 = jsonParse(k1) - const key2 = jsonParse(k2) - return keySort(key1, key2) - }) + const key1 = jsonParse(k1); + const key2 = jsonParse(k2); + return keySort(key1, key2); + }); if (options?.reverse) { - entries.reverse() + entries.reverse(); } if (prefix && prefix.length > 0) { entries = entries.filter(([key]) => { - const parsedKey = jsonParse(key) - const keyPrefix = parsedKey.slice(0, prefix.length) - return jsonStringify(keyPrefix) === jsonStringify(prefix) - }) + const parsedKey = jsonParse(key); + const keyPrefix = parsedKey.slice(0, prefix.length); + return jsonStringify(keyPrefix) === jsonStringify(prefix); + }); } if (start) { const index = entries.findIndex( ([key]) => key === jsonStringify(start), - ) + ); if (index) { - entries = entries.slice(index) + entries = entries.slice(index); } } if (end) { const index = entries.findIndex( ([key]) => key === jsonStringify(end), - ) + ); if (index) { - entries 
= entries.slice(0, index) + entries = entries.slice(0, index); } } if (options?.cursor) { const index = entries.findIndex( ([key]) => key === options.cursor, - ) + ); if (index) { - entries = entries.slice(index) + entries = entries.slice(index); } } const iter = async function* () { - let count = 0 + let count = 0; for (const [key, entry] of entries) { if (options?.limit !== undefined && count >= options?.limit) { - return + return; } yield { key: jsonParse(key) as DenoKvLaxKey, ...entry, - } + }; - count++ + count++; } - } + }; - const cursorEntry = options?.limit ? entries.at(options?.limit) : undefined - const cursor = cursorEntry ? cursorEntry[0] : "" - return Object.assign(iter(), { cursor }) + const cursorEntry = options?.limit ? entries.at(options?.limit) : undefined; + const cursor = cursorEntry ? cursorEntry[0] : ""; + return Object.assign(iter(), { cursor }); } listenQueue(handler: (value: unknown) => unknown): Promise { - this.listenHandlers.push(handler) + this.listenHandlers.push(handler); if (!this.listener) { - this.listener = Promise.withResolvers() + this.listener = Promise.withResolvers(); } - return this.listener.promise + return this.listener.promise; } enqueue( value: unknown, options?: DenoKvEnqueueOptions, ): Promise | DenoKvCommitResult { - return this._enqueue(value, createVersionstamp(), options) + return this._enqueue(value, createVersionstamp(), options); } watch( keys: DenoKvStrictKey[], options?: DenoKvWatchOptions, ): ReadableStream { - const watcher = new Watcher(this, keys, options) - this.watchers.push(watcher) - return watcher.stream + const watcher = new Watcher(this, keys, options); + this.watchers.push(watcher); + return watcher.stream; } atomic(): DenoAtomicOperation { - return new MapKvAtomicOperation(this) + return new MapKvAtomicOperation(this); } _set( @@ -224,18 +224,18 @@ export class MapKv implements DenoKv { this.map.set(jsonStringify(key), { value, versionstamp: versionstamp, - }) + }); - this.watchers.forEach((w) => w.update(key)) + this.watchers.forEach((w) => w.update(key)); if (options?.expireIn !== undefined) { - setTimeout(() => this.delete(key), options.expireIn) + setTimeout(() => this.delete(key), options.expireIn); } return { ok: true, versionstamp, - } + }; } _enqueue( @@ -244,12 +244,12 @@ export class MapKv implements DenoKv { options?: DenoKvEnqueueOptions, ): Promise | DenoKvCommitResult { setTimeout(async () => { - await Promise.all(this.listenHandlers.map((h) => h(value))) - }, options?.delay ?? 0) + await Promise.all(this.listenHandlers.map((h) => h(value))); + }, options?.delay ?? 
0); return { ok: true, versionstamp, - } + }; } } diff --git a/src/ext/kv/mod.ts b/src/ext/kv/mod.ts index 400ca04..cf42941 100644 --- a/src/ext/kv/mod.ts +++ b/src/ext/kv/mod.ts @@ -49,7 +49,7 @@ * ``` */ -export { MapKv } from "./map_kv.ts" -export { StorageAdapter } from "./storage_adapter.ts" -export { MapKvAtomicOperation } from "./atomic.ts" -export type * from "./types.ts" +export { MapKv } from "./map_kv.ts"; +export { StorageAdapter } from "./storage_adapter.ts"; +export { MapKvAtomicOperation } from "./atomic.ts"; +export type * from "./types.ts"; diff --git a/src/ext/kv/storage_adapter.ts b/src/ext/kv/storage_adapter.ts index 4afca64..39409ab 100644 --- a/src/ext/kv/storage_adapter.ts +++ b/src/ext/kv/storage_adapter.ts @@ -1,5 +1,5 @@ -import { jsonParse, jsonStringify } from "../../utils.ts" -import type { BasicMap } from "./types.ts" +import { jsonParse, jsonStringify } from "../../utils.ts"; +import type { BasicMap } from "./types.ts"; /** * BasicMap adapter for Storage. @@ -21,40 +21,40 @@ import type { BasicMap } from "./types.ts" * ``` */ export class StorageAdapter implements BasicMap { - private storage: Storage + private storage: Storage; constructor(storage: Storage = localStorage) { - this.storage = storage + this.storage = storage; } set(key: K, value: V): void { - this.storage.setItem(jsonStringify(key), jsonStringify(value)) + this.storage.setItem(jsonStringify(key), jsonStringify(value)); } get(key: K): V | undefined { - const valStr = this.storage.getItem(jsonStringify(key)) - return !valStr ? undefined : jsonParse(valStr) + const valStr = this.storage.getItem(jsonStringify(key)); + return !valStr ? undefined : jsonParse(valStr); } delete(key: K): void { - this.storage.removeItem(jsonStringify(key)) + this.storage.removeItem(jsonStringify(key)); } *entries(): IterableIterator<[K, V]> { for (let i = 0; i < this.storage.length; i++) { - const keyStr = this.storage.key(i) - if (!keyStr) return + const keyStr = this.storage.key(i); + if (!keyStr) return; - const valStr = this.storage.getItem(keyStr) - if (!valStr) return + const valStr = this.storage.getItem(keyStr); + if (!valStr) return; - const key = jsonParse(keyStr) - const value = jsonParse(valStr) - yield [key, value] + const key = jsonParse(keyStr); + const value = jsonParse(valStr); + yield [key, value]; } } clear(): void { - this.storage.clear() + this.storage.clear(); } } diff --git a/src/ext/kv/types.ts b/src/ext/kv/types.ts index 3e82ec5..e08c195 100644 --- a/src/ext/kv/types.ts +++ b/src/ext/kv/types.ts @@ -1,4 +1,4 @@ -import type { DenoKvEntry } from "../../types.ts" +import type { DenoKvEntry } from "../../types.ts"; /** Interface for basic map methods */ export type BasicMap = { @@ -9,7 +9,7 @@ export type BasicMap = { * @param value - Value of the entry. * @returns void */ - set(key: K, value: V): void + set(key: K, value: V): void; /** * Get a key/value entry from the map. @@ -17,7 +17,7 @@ export type BasicMap = { * @param key - Key that identifies the entry. * @returns The entry value or undefined if it does not exist in the map. */ - get(key: K): V | undefined + get(key: K): V | undefined; /** * Delete a key/value entry from the map. @@ -25,18 +25,18 @@ export type BasicMap = { * @param key - Key that identifies the entry. * @returns void */ - delete(key: K): void + delete(key: K): void; /** * Get an iterator of the key/value entries in the map. * * @returns An IterableIterator of [key, value] entries. 
*/ - entries(): IterableIterator<[K, V]> + entries(): IterableIterator<[K, V]>; /** Removes all key/value entries from the map. */ - clear(): void -} + clear(): void; +}; /** Options when constructing a new MapKv instance. */ export type MapKvOptions = { @@ -45,15 +45,15 @@ export type MapKvOptions = { * * @default new Map() */ - map?: BasicMap + map?: BasicMap; /** Initial KV entries. */ - entries?: DenoKvEntry[] + entries?: DenoKvEntry[]; /** * Whether the underlying map should be cleared or not when the store is closed. * * @default false */ - clearOnClose?: boolean -} + clearOnClose?: boolean; +}; diff --git a/src/ext/kv/utils.ts b/src/ext/kv/utils.ts index d0883cf..c62e6b8 100644 --- a/src/ext/kv/utils.ts +++ b/src/ext/kv/utils.ts @@ -1,43 +1,43 @@ -import { ulid } from "../../deps.ts" -import type { DenoKvStrictKey, DenoKvStrictKeyPart } from "../../types.ts" +import { ulid } from "../../deps.ts"; +import type { DenoKvStrictKey, DenoKvStrictKeyPart } from "../../types.ts"; export function createVersionstamp() { - return ulid() + return ulid(); } export function keySort(key1: DenoKvStrictKey, key2: DenoKvStrictKey): number { for (let i = 0; i < Math.min(key1.length, key2.length); i++) { - const p1 = key1.at(i) - const p2 = key2.at(i) + const p1 = key1.at(i); + const p2 = key2.at(i); if (p1 === undefined) { - return -1 + return -1; } if (p2 === undefined) { - return 1 + return 1; } - const typeSorted = sortByType(p1, p2) + const typeSorted = sortByType(p1, p2); if (typeSorted !== 0) { - return typeSorted + return typeSorted; } - const valueSorted = sortByValue(p1, p2) + const valueSorted = sortByValue(p1, p2); if (valueSorted !== 0) { - return valueSorted + return valueSorted; } } if (key1.length < key2.length) { - return -1 + return -1; } if (key1.length > key2.length) { - return 1 + return 1; } - return 0 + return 0; } const typeMap = { @@ -49,15 +49,15 @@ const typeMap = { function: 5, symbol: 5, undefined: 5, -} +}; function sortByType( part1: DenoKvStrictKeyPart, part2: DenoKvStrictKeyPart, ): number { - const t1 = typeMap[typeof part1] - const t2 = typeMap[typeof part2] - return t1 - t2 + const t1 = typeMap[typeof part1]; + const t2 = typeMap[typeof part2]; + return t1 - t2; } function sortByValue( @@ -65,81 +65,81 @@ function sortByValue( part2: DenoKvStrictKeyPart, ) { if (typeof part2 !== typeof part2) { - throw Error("Cannot compare values of different type") + throw Error("Cannot compare values of different type"); } switch (typeof part1) { case "object": { - return sortByUint8Array(part1, part2 as Uint8Array) + return sortByUint8Array(part1, part2 as Uint8Array); } case "string": { - return sortByString(part1, part2 as string) + return sortByString(part1, part2 as string); } case "number": { - return sortByNumber(part1, part2 as number) + return sortByNumber(part1, part2 as number); } case "bigint": { - return sortByBigint(part1, part2 as bigint) + return sortByBigint(part1, part2 as bigint); } case "boolean": { - return sortByBoolean(part1, part2 as boolean) + return sortByBoolean(part1, part2 as boolean); } default: { - return 0 + return 0; } } } function sortByUint8Array(u1: Uint8Array, u2: Uint8Array) { for (let i = 0; i < Math.min(u1.length, u2.length); i++) { - const b1 = u1.at(i) - const b2 = u2.at(i) + const b1 = u1.at(i); + const b2 = u2.at(i); if (b1 === undefined) { - return -1 + return -1; } if (b2 === undefined) { - return 1 + return 1; } if (b2 > b1) { - return -1 + return -1; } if (b2 < b1) { - return 1 + return 1; } } if (u1.length < u2.length) { - return -1 
+ return -1; } if (u1.length > u2.length) { - return 1 + return 1; } - return 0 + return 0; } function sortByString(str1: string, str2: string): number { - return str1.localeCompare(str2) + return str1.localeCompare(str2); } function sortByNumber(n1: number, n2: number): number { - return n1 - n2 + return n1 - n2; } function sortByBigint(n1: bigint, n2: bigint): number { - return n1 < n2 ? -1 : n1 > n2 ? 1 : 0 + return n1 < n2 ? -1 : n1 > n2 ? 1 : 0; } function sortByBoolean(b1: boolean, b2: boolean): number { - return Number(b1) - Number(b2) + return Number(b1) - Number(b2); } diff --git a/src/ext/kv/watcher.ts b/src/ext/kv/watcher.ts index 9bb9f00..a75ecff 100644 --- a/src/ext/kv/watcher.ts +++ b/src/ext/kv/watcher.ts @@ -1,85 +1,89 @@ -import type { DenoKvWatchOptions } from "../../../mod.ts" -import type { DenoKvEntryMaybe, DenoKvStrictKey } from "../../types.ts" -import { jsonStringify } from "../../utils.ts" -import type { MapKv } from "./map_kv.ts" +import type { DenoKvWatchOptions } from "../../../mod.ts"; +import type { DenoKvEntryMaybe, DenoKvStrictKey } from "../../types.ts"; +import { jsonStringify } from "../../utils.ts"; +import type { MapKv } from "./map_kv.ts"; export class Watcher { - private kv: MapKv - private keys: DenoKvStrictKey[] - private options?: DenoKvWatchOptions - private listener: ReturnType> - private previousEntries: DenoKvEntryMaybe[] - readonly stream: ReadableStream + private kv: MapKv; + private keys: DenoKvStrictKey[]; + private options?: DenoKvWatchOptions; + private listener: ReturnType< + typeof Promise.withResolvers + >; + private previousEntries: DenoKvEntryMaybe[]; + readonly stream: ReadableStream; constructor( kv: MapKv, keys: DenoKvStrictKey[], options?: DenoKvWatchOptions, ) { - this.kv = kv - this.keys = keys - this.options = options + this.kv = kv; + this.keys = keys; + this.options = options; - const previousEntries = kv.getMany(keys) - this.previousEntries = previousEntries + const previousEntries = kv.getMany(keys); + this.previousEntries = previousEntries; - this.listener = Promise.withResolvers() - const listener = this.listener + this.listener = Promise.withResolvers(); + const listener = this.listener; this.stream = new ReadableStream({ async start(controller) { - controller.enqueue(previousEntries) + controller.enqueue(previousEntries); while (true) { try { - const entries = await listener.promise - controller.enqueue(entries) + const entries = await listener.promise; + controller.enqueue(entries); } catch (_) { - controller.close() - break + controller.close(); + break; } } }, cancel() { - listener.reject() + listener.reject(); }, - }) + }); } update(key: DenoKvStrictKey) { - const match = this.keys.some((k) => jsonStringify(k) === jsonStringify(key)) - if (!match) return + const match = this.keys.some((k) => + jsonStringify(k) === jsonStringify(key) + ); + if (!match) return; - const entries = this.kv.getMany(this.keys) + const entries = this.kv.getMany(this.keys); const previousEntry = this.previousEntries.find((entry) => jsonStringify(entry.key) === jsonStringify(key) - ) + ); const newEntry = entries.find((entry) => jsonStringify(entry.key) === jsonStringify(key) - ) + ); - if (!previousEntry || !newEntry) return + if (!previousEntry || !newEntry) return; // if ( // !options?.raw && // previousEntry.versionstamp === newEntry.versionstamp // ) return - this.previousEntries = entries - this.listener.resolve(entries) + this.previousEntries = entries; + this.listener.resolve(entries); const { promise, resolve, reject } = 
Promise.withResolvers< DenoKvEntryMaybe[] - >() + >(); - this.listener.promise = promise - this.listener.resolve = resolve - this.listener.reject = reject + this.listener.promise = promise; + this.listener.resolve = resolve; + this.listener.reject = reject; } cancel() { - this.listener.reject() + this.listener.reject(); } } diff --git a/src/ext/migrate/deps.ts b/src/ext/migrate/deps.ts index bf39c9b..69bcfe6 100644 --- a/src/ext/migrate/deps.ts +++ b/src/ext/migrate/deps.ts @@ -1 +1 @@ -export { parseArgs } from "jsr:@std/cli@^0.220/parse_args" +export { parseArgs } from "jsr:@std/cli@^0.220/parse_args"; diff --git a/src/ext/migrate/errors.ts b/src/ext/migrate/errors.ts index baf6de4..bd8c97f 100644 --- a/src/ext/migrate/errors.ts +++ b/src/ext/migrate/errors.ts @@ -1,10 +1,10 @@ export class NoKvFoundError extends Error { - name = "NoKvFoundError" + override name = "NoKvFoundError"; constructor( message?: string | undefined, options?: ErrorOptions | undefined, ) { - super(message, options) + super(message, options); } } diff --git a/src/ext/migrate/migrate.ts b/src/ext/migrate/migrate.ts index 846fb21..23a1536 100644 --- a/src/ext/migrate/migrate.ts +++ b/src/ext/migrate/migrate.ts @@ -1,5 +1,5 @@ -import { KVDEX_KEY_PREFIX } from "../../constants.ts" -import type { MigrateOptions } from "./types.ts" +import { KVDEX_KEY_PREFIX } from "../../constants.ts"; +import type { MigrateOptions } from "./types.ts"; /** * Migrate entries from a source KV instance to a target KV instance. @@ -24,8 +24,8 @@ export async function migrate({ target, all, }: MigrateOptions): Promise { - const iter = source.list({ prefix: all ? [] : [KVDEX_KEY_PREFIX] }) + const iter = source.list({ prefix: all ? [] : [KVDEX_KEY_PREFIX] }); for await (const { key, value } of iter) { - await target.set(key, value) + await target.set(key, value); } } diff --git a/src/ext/migrate/mod.ts b/src/ext/migrate/mod.ts index 0a6fb09..fd69e80 100644 --- a/src/ext/migrate/mod.ts +++ b/src/ext/migrate/mod.ts @@ -33,40 +33,40 @@ */ // Imports -import { parseArgs } from "./deps.ts" -import { migrate } from "./migrate.ts" -import { NoKvFoundError } from "./errors.ts" +import { parseArgs } from "./deps.ts"; +import { migrate } from "./migrate.ts"; +import { NoKvFoundError } from "./errors.ts"; // Exports -export { migrate } -export type * from "./types.ts" -export * from "./errors.ts" +export { migrate }; +export type * from "./types.ts"; +export * from "./errors.ts"; // Run migrate if main if (import.meta.main) { const { source, target, all } = parseArgs(Deno.args, { string: ["source", "target"], boolean: ["all"], - }) + }); if (!source) { throw new NoKvFoundError( "A source KV path to export from must be provided using the --source argument", - ) + ); } if (!target) { throw new NoKvFoundError( "A target KV path to export to must be provided using the --target argument", - ) + ); } - using sourceKv = await Deno.openKv(source) - using targetKv = await Deno.openKv(target) + using sourceKv = await Deno.openKv(source); + using targetKv = await Deno.openKv(target); await migrate({ source: sourceKv, target: targetKv, all, - }) + }); } diff --git a/src/ext/migrate/types.ts b/src/ext/migrate/types.ts index 8c46997..6110870 100644 --- a/src/ext/migrate/types.ts +++ b/src/ext/migrate/types.ts @@ -1,15 +1,15 @@ /** Options for migrating entries from a source KV instance to a target KV instance */ export type MigrateOptions = { /** Source KV. */ - source: Deno.Kv + source: Deno.Kv; /** Target KV. 
*/ - target: Deno.Kv + target: Deno.Kv; /** * Flag indicating whether to migrate all entries or only kvdex specific entries. * * @default false */ - all?: boolean -} + all?: boolean; +}; diff --git a/src/ext/zod/deps.ts b/src/ext/zod/deps.ts index 490ab1a..a4d628b 100644 --- a/src/ext/zod/deps.ts +++ b/src/ext/zod/deps.ts @@ -1 +1 @@ -export { z } from "npm:zod@^3.22" +export { z } from "npm:zod@^3.22"; diff --git a/src/ext/zod/mod.ts b/src/ext/zod/mod.ts index 039afef..085825b 100644 --- a/src/ext/zod/mod.ts +++ b/src/ext/zod/mod.ts @@ -25,4 +25,4 @@ * ``` */ -export * from "./schemas.ts" +export * from "./schemas.ts"; diff --git a/src/ext/zod/schemas.ts b/src/ext/zod/schemas.ts index 8f5ab20..a62c4d3 100644 --- a/src/ext/zod/schemas.ts +++ b/src/ext/zod/schemas.ts @@ -1,18 +1,18 @@ -import { z } from "./deps.ts" -import type { KvArray, KvId, KvObject, KvValue } from "../../types.ts" +import { z } from "./deps.ts"; +import type { KvArray, KvId, KvObject, KvValue } from "../../types.ts"; -const LazyKvValueSchema = z.lazy(() => KvValueSchema) +const LazyKvValueSchema = z.lazy(() => KvValueSchema); -const LazyKvArraySchema = z.lazy(() => KvArraySchema) +const LazyKvArraySchema = z.lazy(() => KvArraySchema); -const LazyKvObjectSchema = z.lazy(() => KvObjectSchema) +const LazyKvObjectSchema = z.lazy(() => KvObjectSchema); /** Zod schema for KvId type */ export const KvIdSchema: z.ZodType = z.string() .or(z.number()) .or(z.bigint()) .or(z.boolean()) - .or(z.instanceof(Uint8Array)) + .or(z.instanceof(Uint8Array)); /** Zod schema for KvValue type */ export const KvValueSchema: z.ZodType = z.undefined() @@ -42,13 +42,13 @@ export const KvValueSchema: z.ZodType = z.undefined() .or(z.map(LazyKvValueSchema, LazyKvValueSchema)) .or(z.instanceof(RegExp)) .or(z.instanceof(DataView)) - .or(z.instanceof(Error)) + .or(z.instanceof(Error)); /** Zod schema for KvArray type */ -export const KvArraySchema: z.ZodType = z.array(KvValueSchema) +export const KvArraySchema: z.ZodType = z.array(KvValueSchema); /** Zod schema for KvObject type */ export const KvObjectSchema: z.ZodType = z.record( z.string().or(z.number()), KvValueSchema, -) +); diff --git a/src/kvdex.ts b/src/kvdex.ts index 84a862d..3aa8f81 100644 --- a/src/kvdex.ts +++ b/src/kvdex.ts @@ -19,17 +19,17 @@ import type { Schema, SchemaDefinition, SetIntervalOptions, -} from "./types.ts" -import { Collection } from "./collection.ts" -import { Document } from "./document.ts" +} from "./types.ts"; +import { Collection } from "./collection.ts"; +import { Document } from "./document.ts"; import { allFulfilled, createHandlerId, extendKey, parseQueueMessage, prepareEnqueue, -} from "./utils.ts" -import { AtomicBuilder } from "./atomic_builder.ts" +} from "./utils.ts"; +import { AtomicBuilder } from "./atomic_builder.ts"; import { DEFAULT_INTERVAL_RETRY, DEFAULT_LOOP_RETRY, @@ -37,9 +37,9 @@ import { MIN_INTERVAL_START_DELAY, MIN_LOOP_START_DELAY, UNDELIVERED_KEY_PREFIX, -} from "./constants.ts" -import { model } from "./model.ts" -import { AtomicWrapper } from "./atomic_wrapper.ts" +} from "./constants.ts"; +import { model } from "./model.ts"; +import { AtomicWrapper } from "./atomic_wrapper.ts"; /** * Create a new database instance. 
@@ -79,8 +79,8 @@ export function kvdex( schemaDefinition: T, ): Kvdex> & Schema { // Set listener activated flag and queue handlers map - let listener: Promise - const queueHandlers = new Map[]>() + let listener: Promise; + const queueHandlers = new Map[]>(); // Create idempotent listener activator const idempotentListener = () => { @@ -89,23 +89,23 @@ export function kvdex( // Add queue listener listener = kv.listenQueue(async (msg) => { // Parse queue message - const parsed = parseQueueMessage(msg) + const parsed = parseQueueMessage(msg); if (!parsed.ok) { - return + return; } // Find correct queue handlers - const { __data__, __handlerId__ } = parsed.msg - const handlers = queueHandlers.get(__handlerId__) + const { __data__, __handlerId__ } = parsed.msg; + const handlers = queueHandlers.get(__handlerId__); // Run queue handlers - await allFulfilled(handlers?.map((handler) => handler(__data__)) ?? []) - }) + await allFulfilled(handlers?.map((handler) => handler(__data__)) ?? []); + }); } // Return queue listener - return listener - } + return listener; + }; // Create schema const schema = _createSchema( @@ -113,21 +113,21 @@ export function kvdex( kv, queueHandlers, idempotentListener, - ) as Schema + ) as Schema; // Create KvDex object - const db = new Kvdex(kv, schema, queueHandlers, idempotentListener) + const db = new Kvdex(kv, schema, queueHandlers, idempotentListener); // Return schema and db combination - return Object.assign(db, schema) + return Object.assign(db, schema); } /** Represents a database instance and contains methods for working on all documents and top-level queues. */ export class Kvdex> { - private kv: DenoKv - private schema: TSchema - private queueHandlers: Map[]> - private idempotentListener: () => Promise + private kv: DenoKv; + private schema: TSchema; + private queueHandlers: Map[]>; + private idempotentListener: () => Promise; constructor( kv: DenoKv, @@ -135,10 +135,10 @@ export class Kvdex> { queueHandlers: Map[]>, idempotentListener: () => Promise, ) { - this.kv = kv - this.schema = schema - this.queueHandlers = queueHandlers - this.idempotentListener = idempotentListener + this.kv = kv; + this.schema = schema; + this.queueHandlers = queueHandlers; + this.idempotentListener = idempotentListener; } /** @@ -166,7 +166,7 @@ export class Kvdex> { TOutput, TOptions > { - return new AtomicBuilder(this.kv, this.schema, selector(this.schema)) + return new AtomicBuilder(this.kv, this.schema, selector(this.schema)); } /** @@ -186,7 +186,7 @@ export class Kvdex> { * @returns Promise resolving to a number representing the total count of documents in the KV store. */ async countAll(options?: CountAllOptions): Promise { - return await _countAll(this.kv, this.schema, options) + return await _countAll(this.kv, this.schema, options); } /** @@ -200,7 +200,7 @@ export class Kvdex> { * @returns Promise resolving to void. 
*/ async deleteAll(): Promise { - await _deleteAll(this.kv, this.schema) + await _deleteAll(this.kv, this.schema); } /** @@ -215,18 +215,18 @@ export class Kvdex> { */ async wipe(): Promise { // Create iterator - const iter = this.kv.list({ prefix: [KVDEX_KEY_PREFIX] }) + const iter = this.kv.list({ prefix: [KVDEX_KEY_PREFIX] }); // Collect all kvdex keys - const keys: DenoKvStrictKey[] = [] + const keys: DenoKvStrictKey[] = []; for await (const { key } of iter) { - keys.push(key as DenoKvStrictKey) + keys.push(key as DenoKvStrictKey); } // Delete all entries - const atomic = new AtomicWrapper(this.kv) - keys.forEach((key) => atomic.delete(key)) - await atomic.commit() + const atomic = new AtomicWrapper(this.kv); + keys.forEach((key) => atomic.delete(key)); + await atomic.commit(); } /** @@ -260,9 +260,9 @@ export class Kvdex> { [KVDEX_KEY_PREFIX, UNDELIVERED_KEY_PREFIX], data, options, - ) + ); - return await this.kv.enqueue(prep.msg, prep.options) + return await this.kv.enqueue(prep.msg, prep.options); } /** @@ -294,15 +294,15 @@ export class Kvdex> { options?: QueueListenerOptions, ): Promise { // Create handler id - const handlerId = createHandlerId([KVDEX_KEY_PREFIX], options?.topic) + const handlerId = createHandlerId([KVDEX_KEY_PREFIX], options?.topic); // Add new handler to specified handlers - const handlers = this.queueHandlers.get(handlerId) ?? [] - handlers.push(handler as QueueMessageHandler) - this.queueHandlers.set(handlerId, handlers) + const handlers = this.queueHandlers.get(handlerId) ?? []; + handlers.push(handler as QueueMessageHandler); + this.queueHandlers.set(handlerId, handlers); // Activate idempotent listener - return this.idempotentListener() + return this.idempotentListener(); } /** @@ -326,12 +326,12 @@ export class Kvdex> { options?: FindOptions, ): Promise | null> { // Create document key, get document entry - const key = extendKey([KVDEX_KEY_PREFIX], UNDELIVERED_KEY_PREFIX, id) - const result = await this.kv.get(key, options) + const key = extendKey([KVDEX_KEY_PREFIX], UNDELIVERED_KEY_PREFIX, id); + const result = await this.kv.get(key, options); // If no entry exists, return null if (result.value === null || result.versionstamp === null) { - return null + return null; } // Return document @@ -339,7 +339,7 @@ export class Kvdex> { id, versionstamp: result.versionstamp, value: result.value as T1, - }) + }); } /** @@ -353,8 +353,8 @@ export class Kvdex> { * @param id - Id of undelivered document. */ async deleteUndelivered(id: KvId): Promise { - const key = extendKey([KVDEX_KEY_PREFIX], UNDELIVERED_KEY_PREFIX, id) - await this.kv.delete(key) + const key = extendKey([KVDEX_KEY_PREFIX], UNDELIVERED_KEY_PREFIX, id); + await this.kv.delete(key); } /** @@ -395,7 +395,7 @@ export class Kvdex> { options?: SetIntervalOptions, ): Promise { // Set id - const id = crypto.randomUUID() + const id = crypto.randomUUID(); // Create interval enqueuer const enqueue = async ( @@ -408,33 +408,33 @@ export class Kvdex> { idsIfUndelivered: [id], delay, topic: id, - }) + }); // Check if message was delivered, break for-loop if successful - const doc = await this.findUndelivered(id) + const doc = await this.findUndelivered(id); if (doc === null) { - break + break; } // Delete undelivered entry before retrying - await this.deleteUndelivered(id) + await this.deleteUndelivered(id); } - } + }; // Add interval listener const listener = this.listenQueue(async (msg) => { // Check if while condition is met, terminate interval if false - const shouldContinue = options?.while?.(msg) ?? 
true + const shouldContinue = options?.while?.(msg) ?? true; if (!shouldContinue) { - await options?.onExit?.(msg) - return + await options?.onExit?.(msg); + return; } // Determine next interval delay const delay = typeof interval === "function" ? await interval(msg) - : interval + : interval; await allFulfilled([ // Enqueue next callback @@ -447,8 +447,8 @@ export class Kvdex> { // Invoke callback function fn(msg), - ]) - }, { topic: id }) + ]); + }, { topic: id }); // Enqueue first task await enqueue( @@ -462,10 +462,10 @@ export class Kvdex> { options?.startDelay ?? MIN_LOOP_START_DELAY, MIN_INTERVAL_START_DELAY, ), - ) + ); // Return listener - return listener + return listener; } /** @@ -500,7 +500,7 @@ export class Kvdex> { options?: LoopOptions>, ): Promise { // Set id - const id = crypto.randomUUID() + const id = crypto.randomUUID(); // Create loop enqueuer const enqueue = async ( @@ -513,36 +513,36 @@ export class Kvdex> { idsIfUndelivered: [id], delay, topic: id, - }) + }); // Check if message was delivered, break for-loop if successful - const doc = await this.findUndelivered(id) + const doc = await this.findUndelivered(id); if (doc === null) { - break + break; } // Delete undelivered entry before retrying - await this.deleteUndelivered(id) + await this.deleteUndelivered(id); } - } + }; // Add loop listener const listener = this.listenQueue>>(async (msg) => { // Check if while condition is met, terminate loop if false - const shouldContinue = await options?.while?.(msg) ?? true + const shouldContinue = await options?.while?.(msg) ?? true; if (!shouldContinue) { - await options?.onExit?.(msg) - return + await options?.onExit?.(msg); + return; } // Set the next delay const delay = typeof options?.delay === "function" ? await options.delay(msg) - : options?.delay ?? 0 + : options?.delay ?? 0; // Run task - const result = await fn(msg) + const result = await fn(msg); // Enqueue next task await enqueue({ @@ -551,8 +551,8 @@ export class Kvdex> { delay: delay, timestamp: new Date(), first: false, - }, delay) - }, { topic: id }) + }, delay); + }, { topic: id }); // Enqueue first task await enqueue( @@ -567,10 +567,10 @@ export class Kvdex> { options?.startDelay ?? MIN_LOOP_START_DELAY, MIN_LOOP_START_DELAY, ), - ) + ); // Return listener - return await listener + return await listener; } } @@ -596,30 +596,30 @@ function _createSchema( treeKey?: KvKey, ): Schema { // Get all the definition entries - const entries = Object.entries(def) + const entries = Object.entries(def); // Create schema entries from schema definition entries const schemaEntries = entries.map(([key, value]) => { // Get the current tree key - const extendedKey = treeKey ? extendKey(treeKey, key) : [key] as KvKey + const extendedKey = treeKey ? 
extendKey(treeKey, key) : [key] as KvKey; // If the entry value is a function => build collection and create collection entry if (typeof value === "function") { - return [key, value(kv, extendedKey, queueHandlers, idempotentListener)] + return [key, value(kv, extendedKey, queueHandlers, idempotentListener)]; } // Create and return schema entry return [ key, _createSchema(value, kv, queueHandlers, idempotentListener, extendedKey), - ] - }) + ]; + }); // Create schema object from schema entries - const schema = Object.fromEntries(schemaEntries) + const schema = Object.fromEntries(schemaEntries); // Return the built schema object - return schema as Schema + return schema as Schema; } /** @@ -639,16 +639,16 @@ async function _countAll( ): Promise { // If input is a collection, return the collection count if (schemaOrCollection instanceof Collection) { - return await schemaOrCollection.count(options) + return await schemaOrCollection.count(options); } // Recursively count the schema collections. const counts = await allFulfilled( Object.values(schemaOrCollection).map((val) => _countAll(kv, val, options)), - ) + ); // Return the sum of collection counts - return counts.reduce((sum, c) => sum + c, 0) + return counts.reduce((sum, c) => sum + c, 0); } /** @@ -666,12 +666,12 @@ async function _deleteAll( ): Promise { // If input is a collection, perform deleteMany if (schemaOrCollection instanceof Collection) { - await schemaOrCollection.deleteMany() - return + await schemaOrCollection.deleteMany(); + return; } // Recursively perform delete for all schemas/collections await allFulfilled( Object.values(schemaOrCollection).map((val) => _deleteAll(kv, val)), - ) + ); } diff --git a/src/model.ts b/src/model.ts index d48ab53..02cff22 100644 --- a/src/model.ts +++ b/src/model.ts @@ -1,4 +1,4 @@ -import type { KvValue, Model } from "./types.ts" +import type { KvValue, Model } from "./types.ts"; /** * Create a standard model without data validation. @@ -36,5 +36,5 @@ export function model< parse: (data) => data as TOutput, _transform: transform, _input: null as TInput, - } + }; } diff --git a/src/types.ts b/src/types.ts index df9f5b7..659819d 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,6 +1,6 @@ -import type { Collection } from "./collection.ts" -import type { DeepMergeOptions } from "./deps.ts" -import type { Document } from "./document.ts" +import type { Collection } from "./collection.ts"; +import type { DeepMergeOptions } from "./deps.ts"; +import type { Document } from "./document.ts"; /*********************/ /* */ @@ -18,53 +18,53 @@ export type BuilderFn< key: KvKey, queueHandlers: QueueHandlers, idempotentListener: IdempotentListener, -) => Collection +) => Collection; /** Any collection builder function */ -export type BuilderFnAny = (...args: any[]) => any +export type BuilderFnAny = (...args: any[]) => any; /** An idempotent listener invoker */ -export type IdempotentListener = () => Promise +export type IdempotentListener = () => Promise; /** Utility type for checking if K is a valid key of T */ -export type CheckKeyOf = K extends keyof T ? T[K] : never +export type CheckKeyOf = K extends keyof T ? T[K] : never; /** Utility type for selecting keys of object T1 that extend T2 */ export type KeysOfThatExtend = keyof { - [K in keyof T1 as T1[K] extends T2 ? K : never]: unknown -} + [K in keyof T1 as T1[K] extends T2 ? 
K : never]: unknown; +}; /** Utility type for selecting keys of object T1 that do not extend T2 */ export type KeysOfThatDontExtend = keyof { - [K in keyof T1 as T1[K] extends T2 ? never : K]: unknown -} + [K in keyof T1 as T1[K] extends T2 ? never : K]: unknown; +}; /** Successful commit result object */ export type CommitResult = { - ok: true - versionstamp: Document["versionstamp"] - id: T2 -} + ok: true; + versionstamp: Document["versionstamp"]; + id: T2; +}; /** Many commit result object */ export type ManyCommitResult = { - ok: true -} + ok: true; +}; /** Pagination object containing last cursor */ export type Pagination = { - cursor: string | undefined -} + cursor: string | undefined; +}; /** Pagination result object containing last cursor and result array */ export type PaginationResult = Pagination & { - result: T[] -} + result: T[]; +}; /** Id generator function */ export type IdGenerator = ( data: T1, -) => T2 | Promise +) => T2 | Promise; /**********************/ /* */ @@ -75,22 +75,22 @@ export type IdGenerator = ( /** Either static or dynamic interval value */ export type IntervalSetter = | number - | ((msg: IntervalMessage) => number | Promise) + | ((msg: IntervalMessage) => number | Promise); /** Options for creating a new interval */ export type SetIntervalOptions = { /** Condition used to determine if the interval should continue running */ - while?: (msg: IntervalMessage) => boolean | Promise + while?: (msg: IntervalMessage) => boolean | Promise; /** Task to be run when terminating the interval, executed after `while()` returns true. */ - onExit?: (msg: IntervalMessage) => unknown + onExit?: (msg: IntervalMessage) => unknown; /** * Delay before running the first task. * * If not set, will run the first task immediately. */ - startDelay?: number + startDelay?: number; /** * Number of retry attempts upon failed enqueue delivery. @@ -99,23 +99,23 @@ export type SetIntervalOptions = { * * @default 10 */ - retry?: number -} + retry?: number; +}; /** Contents of interval megssage */ export type IntervalMessage = { /** Task number, starts at 0 for the first task. */ - count: number + count: number; /** Previously set interval. Equal to `startDelay` or 0 for the first task. */ - interval: number + interval: number; /** Enqueue timestamp of current task. */ - timestamp: Date + timestamp: Date; /** True if the current task is the first callback, false if not. */ - first: boolean -} + first: boolean; +}; /******************/ /* */ @@ -130,20 +130,20 @@ export type LoopOptions = { * * If not set, next callback is invoked immediately on task end. */ - delay?: number | ((msg: LoopMessage) => number | Promise) + delay?: number | ((msg: LoopMessage) => number | Promise); /** Condition used to determine if the loop should continue running */ - while?: (msg: LoopMessage) => boolean | Promise + while?: (msg: LoopMessage) => boolean | Promise; /** Task to be run when terminating the loop, executed after `while()` returns true. */ - onExit?: (msg: LoopMessage) => unknown + onExit?: (msg: LoopMessage) => unknown; /** * Delay before running the first task. * * If not set, will run the first task immediately. */ - startDelay?: number + startDelay?: number; /** * Number of retry attempts upon failed enqueue delivery. @@ -152,37 +152,37 @@ export type LoopOptions = { * * @default 10 */ - retry?: number -} + retry?: number; +}; /** Contents of loop message */ export type LoopMessage = & { /** Task number, starts at 0. 
*/ - count: number + count: number; /** Previously set delay, is equal to `startDelay` or 0 for the first task. */ - delay: number + delay: number; /** Enqueue timestamp of current task. */ - timestamp: Date + timestamp: Date; } & ( | { /** Result of prevous task. Is null for the first task. */ - result: T + result: T; /** True if the current task is the first callback, false if not. */ - first: false + first: false; } | { /** Result of prevous task. Is null for the first task. */ - result: null + result: null; /** True if the current task is the first callback, false if not. */ - first: true + first: true; } - ) + ); /****************************/ /* */ @@ -198,72 +198,72 @@ export type CollectionSelector< TOptions extends CollectionOptions, > = ( schema: TSchema, -) => Collection +) => Collection; /** Prepared value delete function */ -export type PrepareDeleteFn = (kv: DenoKv) => Promise +export type PrepareDeleteFn = (kv: DenoKv) => Promise; /** Prepared index delete function */ export type PreparedIndexDelete = { - id: KvId - data: KvObject -} + id: KvId; + data: KvObject; +}; /** Atomic builder operations */ export type Operations = { - atomic: DenoAtomicOperation - asyncMutations: Array<() => Promise> - prepareDeleteFns: PrepareDeleteFn[] - indexDeleteCollectionKeys: KvKey[] - indexAddCollectionKeys: KvKey[] -} + atomic: DenoAtomicOperation; + asyncMutations: Array<() => Promise>; + prepareDeleteFns: PrepareDeleteFn[]; + indexDeleteCollectionKeys: KvKey[]; + indexAddCollectionKeys: KvKey[]; +}; /** Kvdex atomic check */ export type AtomicCheck = { /** Id of document to check */ - id: Document["id"] + id: Document["id"]; /** Versionstamp of document to check */ - versionstamp: Document["versionstamp"] -} + versionstamp: Document["versionstamp"]; +}; /** Atomic mutation object */ export type AtomicMutation = & { - id: T2 + id: T2; } & ( | { - type: "set" - value: T1 - expireIn?: number + type: "set"; + value: T1; + expireIn?: number; } | { - type: "add" - value: T1 - expireIn?: number + type: "add"; + value: T1; + expireIn?: number; } | { - type: "sum" - value: T1 extends DenoKvU64 ? bigint : never + type: "sum"; + value: T1 extends DenoKvU64 ? bigint : never; } | { - type: "min" - value: T1 extends DenoKvU64 ? bigint : never + type: "min"; + value: T1 extends DenoKvU64 ? bigint : never; } | { - type: "max" - value: T1 extends DenoKvU64 ? bigint : never + type: "max"; + value: T1 extends DenoKvU64 ? bigint : never; } | { - type: "delete" + type: "delete"; } - ) + ); /** Options for atomic set operation */ export type AtomicSetOptions = NonNullable< Parameters["set"]>["2"] -> +>; /************************/ /* */ @@ -274,34 +274,36 @@ export type AtomicSetOptions = NonNullable< /** Options for creating a new collection */ export type CollectionOptions = & { - idGenerator?: IdGenerator - serialize?: SerializeOptions - history?: true + idGenerator?: IdGenerator; + serialize?: SerializeOptions; + history?: true; } & ( T extends KvObject ? { - indices?: IndexRecord + indices?: IndexRecord; } : { [K in never]: never } - ) + ); export type ParseId> = T["idGenerator"] extends - IdGenerator ? Awaited> : string + IdGenerator ? 
Awaited> : string; /** Utility type for accessing all possible collection options */ -export type PossibleCollectionOptions = CollectionOptions> +export type PossibleCollectionOptions = CollectionOptions< + Record +>; /** Record of all collection keys */ export type CollectionKeys = { - base: KvKey - id: KvKey - primaryIndex: KvKey - secondaryIndex: KvKey - segment: KvKey - undelivered: KvKey - history: KvKey - historySegment: KvKey -} + base: KvKey; + id: KvKey; + primaryIndex: KvKey; + secondaryIndex: KvKey; + segment: KvKey; + undelivered: KvKey; + history: KvKey; + historySegment: KvKey; +}; /** * Model describing the input and output type of data. @@ -309,32 +311,32 @@ export type CollectionKeys = { */ export type Model = { /** A parse function that takes data as an argument and returns the parsed output */ - parse(data: unknown): TOutput + parse(data: unknown): TOutput; /** * An optional transform function that takes an input value as argument and returns the output type. */ - _transform?(input: TInput): TOutput + _transform?(input: TInput): TOutput; /** Used to determine the input type */ - _input: TInput -} + _input: TInput; +}; /** Historic write entry */ export type WriteHistoryEntry = { - type: "write" - timestamp: Date - value: T -} + type: "write"; + timestamp: Date; + value: T; +}; /** Historic delete entry */ export type DeleteHistoryEntry = { - type: "delete" - timestamp: Date -} + type: "delete"; + timestamp: Date; +}; /** Historic document entry */ -export type HistoryEntry = WriteHistoryEntry | DeleteHistoryEntry +export type HistoryEntry = WriteHistoryEntry | DeleteHistoryEntry; /*******************/ /* */ @@ -343,12 +345,12 @@ export type HistoryEntry = WriteHistoryEntry | DeleteHistoryEntry /*******************/ /** Type of index. "primary" is unique, while "secondary" is non-unique. */ -export type IndexType = "primary" | "secondary" +export type IndexType = "primary" | "secondary"; /** Record of indices */ export type IndexRecord = { - [K in KeysOfThatExtend]?: IndexType -} + [K in KeysOfThatExtend]?: IndexType; +}; /** Keys of primary indices */ export type PrimaryIndexKeys< @@ -356,7 +358,7 @@ export type PrimaryIndexKeys< T2 extends CollectionOptions, > = T2 extends { indices: IndexRecord } ? KeysOfThatExtend - : never + : never; /** Keys of secondary indices */ export type SecondaryIndexKeys< @@ -364,12 +366,12 @@ export type SecondaryIndexKeys< T2 extends CollectionOptions, > = T2 extends { indices: IndexRecord } ? KeysOfThatExtend - : never + : never; /** Indexed value entry */ export type IndexDataEntry = Omit & { - __id__: KvId -} + __id__: KvId; +}; /***********************/ /* */ @@ -379,17 +381,17 @@ export type IndexDataEntry = Omit & { /** Record of serializer functions */ export type Serializer = { - serialize: (data: T) => Uint8Array | Promise - deserialize: (data: Uint8Array) => T | Promise - compress: (data: Uint8Array) => Uint8Array | Promise - decompress: (data: Uint8Array) => Uint8Array | Promise -} + serialize: (data: T) => Uint8Array | Promise; + deserialize: (data: Uint8Array) => T | Promise; + compress: (data: Uint8Array) => Uint8Array | Promise; + decompress: (data: Uint8Array) => Uint8Array | Promise; +}; /** Serialized value entry */ export type SerializedEntry = { - isUint8Array: boolean - ids: KvId[] -} + isUint8Array: boolean; + ids: KvId[]; +}; /** * Serialize options. 
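*
* A minimal collection-level sketch, assuming `kv` from `Deno.openKv()` and the `kvdex`,
* `collection` and `model` imports from the README; whether a partial custom serializer falls
* back to the built-in functions for its omitted members is an assumption here:
*
* @example
* ```ts
* type Report = { title: string; body: string };
*
* const db = kvdex(kv, {
*   reports: collection(model<Report>(), {
*     // Named strategy (e.g. "json")
*     serialize: "json",
*   }),
*   logs: collection(model<Report>(), {
*     // Custom (partial) serializer
*     serialize: {
*       serialize: (data) => new TextEncoder().encode(JSON.stringify(data)),
*       deserialize: (data) => JSON.parse(new TextDecoder().decode(data)),
*     },
*   }),
* });
* ```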
@@ -413,7 +415,7 @@ export type SerializeOptions = | "v8-uncompressed" | "json" | "json-uncompressed" - | Partial + | Partial; /***************************/ /* */ @@ -424,14 +426,14 @@ export type SerializeOptions = /** Options for setting new document entry */ export type SetOptions = NonNullable["2"]> & { /** Number of retry attempts before returning failed operation */ - retry?: number + retry?: number; /** * Whether the operation should overwrite an existing document with the same id or not. * * @default false */ - overwrite?: boolean + overwrite?: boolean; /** * Enable or disable atomic operation batching. @@ -441,8 +443,8 @@ export type SetOptions = NonNullable["2"]> & { * * @default false */ - batched?: boolean -} + batched?: boolean; +}; /** Options for listing documents */ export type ListOptions = @@ -454,39 +456,39 @@ export type ListOptions = * @param value - Input value. * @returns true or false. */ - filter?: (value: T1) => boolean + filter?: (value: T1) => boolean; /** * Number of documents to offset by. * * If set, the underlying limit for the KV.list operation is set equal to offset + limit. */ - offset?: number + offset?: number; /** Id of document to start from. */ - startId?: T2 + startId?: T2; /** Id of document to end at. */ - endId?: T2 + endId?: T2; /** Max number of documents that are read from the KV store. Applies before before filtering. */ - limit?: number + limit?: number; /** Max number of documents that are returned. Differs from "limit" by applying after documents are read from the KV store and filtered. */ - take?: number - } + take?: number; + }; /** Options for handling one listed document */ export type HandleOneOptions = Omit< ListOptions, "take" -> +>; /** Options for finding a single document */ -export type FindOptions = NonNullable[1]> +export type FindOptions = NonNullable[1]>; /** Options for finding many documents */ -export type FindManyOptions = NonNullable[1]> +export type FindManyOptions = NonNullable[1]>; /** Options for updating a single document */ export type UpdateOptions = Omit & { @@ -499,11 +501,11 @@ export type UpdateOptions = Omit & { * * @default "merge" */ - strategy?: UpdateStrategy + strategy?: UpdateStrategy; /** Options to apply when deep-merging objects. */ - mergeOptions?: DeepMergeOptions -} + mergeOptions?: DeepMergeOptions; +}; /** * Update strategy. @@ -512,20 +514,20 @@ export type UpdateOptions = Omit & { * "merge" deep-merges the existing value with a new value, * "merge-shallow" shallow-merges the existing value with a new value */ -export type UpdateStrategy = "replace" | "merge" | "merge-shallow" +export type UpdateStrategy = "replace" | "merge" | "merge-shallow"; /** Options for updating many documents */ export type UpdateManyOptions = & ListOptions - & UpdateOptions + & UpdateOptions; /** Options for updating one listed document */ export type UpdateOneOptions = & HandleOneOptions - & UpdateOptions + & UpdateOptions; /** Options for counting all documents */ -export type CountAllOptions = Pick, "consistency"> +export type CountAllOptions = Pick, "consistency">; /** Options for enqueing messages */ export type EnqueueOptions = @@ -535,20 +537,20 @@ export type EnqueueOptions = > & { /** List of ids to set the message value to if undelivered */ - idsIfUndelivered?: KvId[] + idsIfUndelivered?: KvId[]; /** Topic to queue the message in. Only listeners in the same topic will receive the message. 
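*
* A minimal usage sketch, assuming a `db` instance created with `kvdex()` and a listener
* registered for the same topic; the topic name `"emails"` is illustrative only:
*
* @example
* ```ts
* // Only queue listeners registered with the "emails" topic receive this message
* await db.enqueue({ to: "oliver@example.com" }, { topic: "emails" });
*
* db.listenQueue((msg) => {
*   // Handle the received message
*   console.log(msg);
* }, { topic: "emails" });
* ```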
*/ - topic?: string - } + topic?: string; + }; /** Options for listening to queue messages */ export type QueueListenerOptions = { /** Topic to listen to. Only messages enqueued in the same topic will be received. */ - topic?: string -} + topic?: string; +}; /** Options for watching for live data updates */ -export type WatchOptions = NonNullable[1]> +export type WatchOptions = NonNullable[1]>; /********************/ /* */ @@ -564,14 +566,14 @@ export type IdUpsert< TId extends KvId, > = { /** Document id to upsert by */ - id: TId + id: TId; /** New value */ - set: TInput + set: TInput; /** Update value */ - update: UpdateData -} + update: UpdateData; +}; /** Upsert by primary index */ export type PrimaryIndexUpsert< @@ -582,17 +584,17 @@ export type PrimaryIndexUpsert< TId extends KvId, > = { /** Id of document if new value is set */ - id?: TId + id?: TId; /** Document index to upsert by */ - index: [TIndex, CheckKeyOf] + index: [TIndex, CheckKeyOf]; /** New value */ - set: TInput + set: TInput; /** Update value */ - update: UpdateData -} + update: UpdateData; +}; /********************/ /* */ @@ -604,15 +606,15 @@ export type PrimaryIndexUpsert< export type SchemaDefinition = { [key: string]: | SchemaDefinition - | BuilderFnAny -} + | BuilderFnAny; +}; /** Built schema from schema definition */ export type Schema = { [K in keyof T]: T[K] extends SchemaDefinition ? Schema : T[K] extends BuilderFnAny ? ReturnType - : never -} + : never; +}; /*******************/ /* */ @@ -622,30 +624,30 @@ export type Schema = { /** Queue message contents */ export type QueueMessage = { - __is_undefined__: boolean - __handlerId__: string - __data__: T -} + __is_undefined__: boolean; + __handlerId__: string; + __data__: T; +}; /** Parsed queue message */ export type ParsedQueueMessage = { - ok: true - msg: QueueMessage + ok: true; + msg: QueueMessage; } | { - ok: false -} + ok: false; +}; /** Queue message handler function */ -export type QueueMessageHandler = (data: T) => unknown +export type QueueMessageHandler = (data: T) => unknown; /** Prepared enqueue */ export type PreparedEnqueue = { - msg: QueueMessage - options: KvEnqueueOptions -} + msg: QueueMessage; + options: KvEnqueueOptions; +}; /** Collection of queue handler functions */ -export type QueueHandlers = Map[]> +export type QueueHandlers = Map[]>; /******************/ /* */ @@ -657,23 +659,23 @@ export type QueueHandlers = Map[]> export type UpdateData< TOutput extends KvValue, TStrategy extends UpdateStrategy | undefined, -> = TStrategy extends "replace" ? TOutput : Partial +> = TStrategy extends "replace" ? TOutput : Partial; /** Flattened document data */ export type FlatDocumentData = & Omit, "value"> & ( T1 extends KvObject ? T1 : { - readonly value: T1 + readonly value: T1; } - ) + ); /** Document data */ export type DocumentData = { - readonly id: T2 - readonly versionstamp: string - readonly value: T1 -} + readonly id: T2; + readonly versionstamp: string; + readonly value: T1; +}; /****************/ /* */ @@ -684,21 +686,21 @@ export type DocumentData = { /** Kv enqueue options */ export type KvEnqueueOptions = NonNullable< Parameters[1] -> +>; /** An entry or collection key */ -export type KvKey = [DenoKvStrictKeyPart, ...DenoKvStrictKey] +export type KvKey = [DenoKvStrictKeyPart, ...DenoKvStrictKey]; /** An entry ID */ -export type KvId = DenoKvStrictKeyPart +export type KvId = DenoKvStrictKeyPart; /** An object containing only KV values, and is itself a KV value. 
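*
* A small illustrative value (the field names are arbitrary); every member is itself a valid
* `KvValue`, so the object qualifies as a `KvObject`:
*
* @example
* ```ts
* const value: KvObject = {
*   username: "oliver",
*   visits: 10n,
*   createdAt: new Date(),
*   tags: ["admin", "beta"],
* };
* ```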
*/ export type KvObject = { - [K: string | number]: KvValue -} + [K: string | number]: KvValue; +}; /** An array containing only KV values, and is itself a KV value. */ -export type KvArray = KvValue[] +export type KvArray = KvValue[]; /** Defines all valid KV value types */ export type KvValue = @@ -729,7 +731,7 @@ export type KvValue = | Map | RegExp | DataView - | Error + | Error; /********************/ /* */ @@ -743,41 +745,41 @@ export type DenoKvStrictKeyPart = | string | number | bigint - | Uint8Array + | Uint8Array; /** Deno [KvKeyPart](https://deno.land/api?s=Deno.KvKeyPart&unstable=) (relaxed) substitute type */ -export type DenoKvLaxKeyPart = DenoKvStrictKeyPart | symbol +export type DenoKvLaxKeyPart = DenoKvStrictKeyPart | symbol; /** Deno [KvKey](https://deno.land/api?s=Deno.KvKey&unstable=) (strict) substitute type */ -export type DenoKvStrictKey = DenoKvStrictKeyPart[] +export type DenoKvStrictKey = DenoKvStrictKeyPart[]; /** Deno [KvKey](https://deno.land/api?s=Deno.KvKey&unstable=) (relaxed) substitute type */ -export type DenoKvLaxKey = DenoKvLaxKeyPart[] +export type DenoKvLaxKey = DenoKvLaxKeyPart[]; /** Deno [KvU64](https://deno.land/api?s=Deno.KvU64&unstable=) substitute type */ export type DenoKvU64 = { /** The value of this unsigned 64-bit integer, represented as a bigint. */ - value: bigint -} + value: bigint; +}; /** Deno [KvCommitError](https://deno.land/api?s=Deno.KvCommitError&unstable=) substitute type */ export type DenoKvCommitError = { - ok: false -} + ok: false; +}; /** Deno [KvCommitResult](https://deno.land/api?s=Deno.KvCommitResult&unstable=) substitute type */ export type DenoKvCommitResult = { - ok: true + ok: true; /** The versionstamp of the value committed to KV. */ - versionstamp: string -} + versionstamp: string; +}; /** Deno [AtomicCheck](https://deno.land/api?s=Deno.AtomicCheck&unstable=) substitute type */ export type DenoAtomicCheck = { - key: DenoKvStrictKey - versionstamp: string | null -} + key: DenoKvStrictKey; + versionstamp: string | null; +}; /** Deno [KvEnqueueOptions](https://deno.land/api?s=Deno.Kv&unstable=&p=prototype.enqueue) substitute type */ export type DenoKvEnqueueOptions = { @@ -787,41 +789,41 @@ export type DenoKvEnqueueOptions = { * * @default 0 */ - delay?: number + delay?: number; /** * Can be used to specify the keys to be set if the value is not successfully delivered to the queue listener after several attempts. * The values are set to the value of the queued message. */ - keysIfUndelivered?: DenoKvStrictKey[] + keysIfUndelivered?: DenoKvStrictKey[]; /** * Can be used to specify the retry policy for failed message delivery. * Each element in the array represents the number of milliseconds to wait before retrying the delivery. * For example, [1000, 5000, 10000] means that a failed delivery will be retried at most 3 times, with 1 second, 5 seconds, and 10 seconds delay between each retry. 
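*
* A sketch combining these options, assuming a `Deno.Kv` instance `kv`; the key
* `["undelivered", "task1"]` and the message value are illustrative only:
*
* @example
* ```ts
* await kv.enqueue({ task: "send-report" }, {
*   // Wait one minute before the first delivery attempt
*   delay: 60_000,
*   // Retry failed deliveries after 1s, 5s and 10s
*   backoffSchedule: [1000, 5000, 10000],
*   // If all attempts fail, write the message value to this key
*   keysIfUndelivered: [["undelivered", "task1"]],
* });
* ```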
*/ - backoffSchedule?: number[] -} + backoffSchedule?: number[]; +}; /** Deno [KvEntry](https://deno.land/api?s=Deno.KvEntry&unstable=) substitute type */ export type DenoKvEntry = { - key: Readonly - value: unknown - versionstamp: string -} + key: Readonly; + value: unknown; + versionstamp: string; +}; /** Deno [KvEntryNull](https://deno.land/api?s=Deno.KvEntryMaybe&unstable=) substitute type */ export type DenoKvEntryNull = { - key: Readonly - value: null - versionstamp: null -} + key: Readonly; + value: null; + versionstamp: null; +}; /** Deno [KvEntryMaybe](https://deno.land/api?s=Deno.KvEntryMaybe&unstable=) substitute type */ -export type DenoKvEntryMaybe = DenoKvEntry | DenoKvEntryNull +export type DenoKvEntryMaybe = DenoKvEntry | DenoKvEntryNull; /** Deno [KvConsistencyLevel](https://deno.land/api?s=Deno.KvConsistencyLevel&unstable=) substitute type */ -export type DenoKvConsistencyLevel = "strong" | "eventual" +export type DenoKvConsistencyLevel = "strong" | "eventual"; /** Deno [KvGetOptions](https://deno.land/api?s=Deno.Kv&unstable=&p=prototype.get) substitute type */ export type DenoKvGetOptions = { @@ -832,8 +834,8 @@ export type DenoKvGetOptions = { * * @default "strong" */ - consistency?: DenoKvConsistencyLevel -} + consistency?: DenoKvConsistencyLevel; +}; /** Deno [KvSetOptions](https://deno.land/api?s=Deno.Kv&unstable=&p=prototype.set) substitute type */ export type DenoKvSetOptions = { @@ -843,8 +845,8 @@ export type DenoKvSetOptions = { * Once the specified duration has passed, the key may still be visible for some additional time. * If the expireIn option is not specified, the key will not expire. */ - expireIn?: number -} + expireIn?: number; +}; /** Deno [KvWatchOptions](https://deno.land/api?s=Deno.Kv&unstable=&p=prototype.watch) substitute type */ export type DenoKvWatchOptions = { @@ -857,23 +859,23 @@ export type DenoKvWatchOptions = { * * @default false */ - raw?: boolean -} + raw?: boolean; +}; /** Deno [KvListSelector](https://deno.land/api?s=Deno.KvListSelector&unstable=) substitute type */ export type DenoKvListSelector = | { prefix: DenoKvStrictKey } | { prefix: DenoKvStrictKey; start: DenoKvStrictKey } | { prefix: DenoKvStrictKey; end: DenoKvStrictKey } - | { start: DenoKvStrictKey; end: DenoKvStrictKey } + | { start: DenoKvStrictKey; end: DenoKvStrictKey }; /** Deno [KvListOptions](https://deno.land/api?s=Deno.KvListOptions&unstable=) substitute type */ export type DenoKvListOptions = { /** The maximum number of values to return from the database. If not specified, all matching values will be returned. */ - limit?: number + limit?: number; /** The cursor to resume the iteration from. If not specified, the iteration will start from the beginning. */ - cursor?: string + cursor?: string; /** * Whether to reverse the order of the returned values. @@ -882,7 +884,7 @@ export type DenoKvListOptions = { * * @default false */ - reverse?: boolean + reverse?: boolean; /** * The consistency level of the list operation. @@ -898,7 +900,7 @@ export type DenoKvListOptions = { * * @default "strong" */ - consistency?: DenoKvConsistencyLevel + consistency?: DenoKvConsistencyLevel; /** * The size of the batches in which the list operation is performed. @@ -909,8 +911,8 @@ export type DenoKvListOptions = { * The default batch size is equal to the limit option, or 100 if this is unset. * The maximum value for this option is 500. Larger values will be clamped. 
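*
* A sketch of a list call using these options, assuming a `Deno.Kv` instance `kv`; the prefix
* `["users"]` is illustrative only:
*
* @example
* ```ts
* const iter = kv.list({ prefix: ["users"] }, {
*   limit: 1_000, // read at most 1000 entries
*   batchSize: 500, // fetch them in batches of up to 500 (the documented maximum)
*   reverse: true, // iterate in descending key order
* });
*
* for await (const entry of iter) {
*   console.log(entry.key, entry.value);
* }
* ```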
*/ - batchSize?: number -} + batchSize?: number; +}; /** Deno [KVListIterator](https://deno.land/api?s=Deno.KvListIterator&unstable=) substitute type */ export type DenoKvListIterator = @@ -924,8 +926,8 @@ export type DenoKvListIterator = * This cursor can be used to resume iteration from the current position in the future * by passing it to any of the list operations (e.g. `getMany()`, `map()`, `forEach()` etc). */ - cursor: string - } + cursor: string; + }; /** Deno [AtomicOperation](deno.land/api?s=Deno.AtomicOperation&unstable=) substitute type */ export type DenoAtomicOperation = { @@ -933,55 +935,55 @@ export type DenoAtomicOperation = { key: DenoKvStrictKey, value: unknown, options?: DenoKvSetOptions, - ): DenoAtomicOperation + ): DenoAtomicOperation; - delete(key: DenoKvStrictKey): DenoAtomicOperation + delete(key: DenoKvStrictKey): DenoAtomicOperation; - min(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation + min(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation; - max(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation + max(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation; - sum(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation + sum(key: DenoKvStrictKey, n: bigint): DenoAtomicOperation; - check(...checks: DenoAtomicCheck[]): DenoAtomicOperation + check(...checks: DenoAtomicCheck[]): DenoAtomicOperation; - enqueue(value: unknown, options?: DenoKvEnqueueOptions): DenoAtomicOperation + enqueue(value: unknown, options?: DenoKvEnqueueOptions): DenoAtomicOperation; commit(): | Promise | DenoKvCommitError - | DenoKvCommitResult -} + | DenoKvCommitResult; +}; /** Deno [KV](https://deno.land/api?s=Deno.Kv&unstable=) substitute type */ export type DenoKv = { - atomic(): DenoAtomicOperation + atomic(): DenoAtomicOperation; - close(): void + close(): void; - delete(key: DenoKvStrictKey): Promise | void + delete(key: DenoKvStrictKey): Promise | void; enqueue( value: unknown, options?: DenoKvEnqueueOptions, - ): Promise | DenoKvCommitResult + ): Promise | DenoKvCommitResult; get( key: DenoKvStrictKey, options?: DenoKvGetOptions, - ): Promise | DenoKvEntryMaybe + ): Promise | DenoKvEntryMaybe; getMany( keys: DenoKvStrictKey[], options?: DenoKvGetOptions, - ): Promise | DenoKvEntryMaybe[] + ): Promise | DenoKvEntryMaybe[]; list( selector: DenoKvListSelector, options?: DenoKvListOptions, - ): DenoKvListIterator + ): DenoKvListIterator; - listenQueue(handler: (value: unknown) => unknown): Promise + listenQueue(handler: (value: unknown) => unknown): Promise; set( key: DenoKvStrictKey, @@ -990,10 +992,10 @@ export type DenoKv = { ): | Promise | DenoKvCommitError - | DenoKvCommitResult + | DenoKvCommitResult; watch( keys: DenoKvStrictKey[], options?: DenoKvWatchOptions, - ): ReadableStream -} + ): ReadableStream; +}; diff --git a/src/utils.ts b/src/utils.ts index 5816da0..b4cde2b 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,5 +1,5 @@ -import { COMPRESSION_QUALITY_LEVEL, GET_MANY_KEY_LIMIT } from "./constants.ts" -import type { Collection } from "./collection.ts" +import { COMPRESSION_QUALITY_LEVEL, GET_MANY_KEY_LIMIT } from "./constants.ts"; +import type { Collection } from "./collection.ts"; import type { AtomicSetOptions, DenoAtomicOperation, @@ -19,7 +19,7 @@ import type { PreparedEnqueue, QueueMessage, WatchOptions, -} from "./types.ts" +} from "./types.ts"; import { brotliCompressSync, brotliDecompressSync, @@ -27,7 +27,7 @@ import { deserialize as _v8Deserialize, serialize as _v8Serialize, ulid, -} from "./deps.ts" +} from "./deps.ts"; /*************************/ /* 
*/ @@ -41,7 +41,7 @@ import { * @returns A generated id of type KvId. */ export function generateId() { - return ulid() + return ulid(); } /** @@ -51,7 +51,7 @@ export function generateId() { * @returns A document id, or undefined if key is empty. */ export function getDocumentId(key: DenoKvStrictKey) { - return key.at(-1) + return key.at(-1); } /** @@ -62,7 +62,7 @@ export function getDocumentId(key: DenoKvStrictKey) { * @returns An extended kv key. */ export function extendKey(key: KvKey, ...keyParts: KvKey) { - return [...key, ...keyParts] as KvKey + return [...key, ...keyParts] as KvKey; } /** @@ -73,7 +73,7 @@ export function extendKey(key: KvKey, ...keyParts: KvKey) { * @returns true if keys are equal, false if not. */ export function keyEq(k1: KvKey, k2: KvKey) { - return JSON.stringify(k1) === JSON.stringify(k2) + return JSON.stringify(k1) === JSON.stringify(k2); } /** @@ -90,15 +90,15 @@ export async function createSecondaryIndexKeyPrefix( collection: Collection, ) { // Serialize and compress index value - const serialized = await collection._serializer.serialize(value) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(value); + const compressed = await collection._serializer.compress(serialized); // Create prefix key return extendKey( collection._keys.secondaryIndex, index as KvId, compressed, - ) + ); } /** @@ -110,12 +110,12 @@ export async function createSecondaryIndexKeyPrefix( export function isKvObject(value: unknown) { // If value is null or undefined, return false if (value === null || value === undefined) { - return false + return false; } // If value is not an object, return false if (typeof value !== "object") { - return false + return false; } // If value is an instance of other KvValue objects, return false @@ -141,11 +141,11 @@ export function isKvObject(value: unknown) { value instanceof DataView || value instanceof Error ) { - return false + return false; } // Return true after performing all checks - return true + return true; } /** @@ -169,42 +169,42 @@ export async function setIndices( // Set primary indices using primary index list for (const index of collection._primaryIndexList) { // Get the index value from data, if undefined continue to next index - const indexValue = data[index] as KvId | undefined - if (typeof indexValue === "undefined") continue + const indexValue = data[index] as KvId | undefined; + if (typeof indexValue === "undefined") continue; // Serialize and compress - const serialized = await collection._serializer.serialize(indexValue) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(indexValue); + const compressed = await collection._serializer.compress(serialized); // Create the index key const indexKey = extendKey( collection._keys.primaryIndex, index, compressed, - ) + ); // Create the index document value const indexEntry: IndexDataEntry = { ...value, __id__: id, - } + }; // Add index insertion to atomic operation, check for exisitng indices atomic.set(indexKey, indexEntry, options).check({ key: indexKey, versionstamp: null, - }) + }); } // Set secondary indices using secondary index list for (const index of collection._secondaryIndexList) { // Get the index value from data, if undefined continue to next index - const indexValue = data[index] as KvId | undefined - if (typeof indexValue === "undefined") continue + const indexValue = data[index] as KvId | undefined; + if (typeof 
indexValue === "undefined") continue; // Serialize and compress - const serialized = await collection._serializer.serialize(indexValue) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(indexValue); + const compressed = await collection._serializer.compress(serialized); // Create the index key const indexKey = extendKey( @@ -212,10 +212,10 @@ export async function setIndices( index, compressed, id, - ) + ); // Add index insertion to atomic operation, check for exisitng indices - atomic.set(indexKey, value, options) + atomic.set(indexKey, value, options); } } @@ -235,27 +235,27 @@ export async function checkIndices( // Check primary indices using primary index list for (const index of collection._primaryIndexList) { // Get the index value from data, if undefined continue to next index - const indexValue = data[index] as KvId | undefined + const indexValue = data[index] as KvId | undefined; if (typeof indexValue === "undefined") { - continue + continue; } // Serialize and compress - const serialized = await collection._serializer.serialize(indexValue) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(indexValue); + const compressed = await collection._serializer.compress(serialized); // Create the index key const indexKey = extendKey( collection._keys.primaryIndex, index, compressed, - ) + ); // Check for existing index entry atomic.check({ key: indexKey, versionstamp: null, - }) + }); } } @@ -277,33 +277,33 @@ export async function deleteIndices( // Delete primary indices using primary index list for (const index of collection._primaryIndexList) { // Get the index value from data, if undefined continue to next index - const indexValue = data[index] as KvId | undefined - if (typeof indexValue === "undefined") continue + const indexValue = data[index] as KvId | undefined; + if (typeof indexValue === "undefined") continue; // Serialize and compress - const serialized = await collection._serializer.serialize(indexValue) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(indexValue); + const compressed = await collection._serializer.compress(serialized); // Create the index key const indexKey = extendKey( collection._keys.primaryIndex, index, compressed, - ) + ); // Add index deletion to atomic operation - atomic.delete(indexKey) + atomic.delete(indexKey); } // Delete seocndary indices using secondary index list for (const index of collection._secondaryIndexList) { // Get the index value from data, if undefined continue to next index - const indexValue = data[index] as KvId | undefined - if (typeof indexValue === "undefined") continue + const indexValue = data[index] as KvId | undefined; + if (typeof indexValue === "undefined") continue; // Serialize and compress - const serialized = await collection._serializer.serialize(indexValue) - const compressed = await collection._serializer.compress(serialized) + const serialized = await collection._serializer.serialize(indexValue); + const compressed = await collection._serializer.compress(serialized); // Create the index key const indexKey = extendKey( @@ -311,10 +311,10 @@ export async function deleteIndices( index, compressed, id, - ) + ); // Add index deletion to atomic operation - atomic.delete(indexKey) + atomic.delete(indexKey); } } @@ -332,20 +332,20 @@ export async function kvGetMany( options?: 
FindManyOptions, ) { // Initialize sliced keys list - const slicedKeys: DenoKvStrictKey[][] = [] + const slicedKeys: DenoKvStrictKey[][] = []; // Slice keys based on getMany keys limit for (let i = 0; i < keys.length; i += GET_MANY_KEY_LIMIT) { - slicedKeys.push(keys.slice(i, i + GET_MANY_KEY_LIMIT)) + slicedKeys.push(keys.slice(i, i + GET_MANY_KEY_LIMIT)); } // Execute getMany for each sliced keys entry const slicedEntries = await allFulfilled(slicedKeys.map((keys) => { - return kv.getMany(keys, options) - })) + return kv.getMany(keys, options); + })); // Return accumulated result - return slicedEntries.flat() + return slicedEntries.flat(); } /** @@ -359,14 +359,14 @@ export async function allFulfilled( values: T[], ) { // Get settled results - const settled = await Promise.allSettled(values) + const settled = await Promise.allSettled(values); // Return fulfilled values return settled.reduce( (acc, result) => result.status === "fulfilled" ? [...acc, result.value] : acc, [] as Awaited[], - ) + ); } /** @@ -388,12 +388,12 @@ export function prepareEnqueue( __is_undefined__: data === undefined, __data__: data, __handlerId__: createHandlerId(baseKey, options?.topic), - } + }; // Create keys if undelivered const keysIfUndelivered = options?.idsIfUndelivered?.map((id) => extendKey(undeliveredKey, id) - ) + ); // Return prepared enqueue return { @@ -402,7 +402,7 @@ export function prepareEnqueue( ...options, keysIfUndelivered, }, - } + }; } /** @@ -416,7 +416,7 @@ export function createHandlerId( key: KvKey, topic: string | undefined, ) { - return `${JSON.stringify(key)}${topic ?? ""}` + return `${JSON.stringify(key)}${topic ?? ""}`; } /** @@ -432,11 +432,11 @@ export function parseQueueMessage( if (!msg) { return { ok: false, - } + }; } // Cast message as QueueMessage - const _msg = msg as QueueMessage + const _msg = msg as QueueMessage; // Check correctness of message parts if ( @@ -445,14 +445,14 @@ export function parseQueueMessage( ) { return { ok: false, - } + }; } // Return parsed queue message return { ok: true, msg: _msg, - } + }; } /** @@ -469,24 +469,24 @@ export function createListSelector( // Create start key const start = typeof options?.startId !== "undefined" ? [...prefixKey, options.startId!] - : undefined + : undefined; // Create end key const end = typeof options?.endId !== "undefined" ? [...prefixKey, options.endId!] - : undefined + : undefined; // Conditionally set prefix key const prefix = Array.isArray(start) && Array.isArray(end) ? undefined! - : prefixKey + : prefixKey; // Return list selector return { prefix, start, end, - } + }; } /** @@ -498,11 +498,11 @@ export function createListSelector( export function createListOptions( options: ListOptions | undefined, ) { - const limit = options?.limit && options.limit + (options.offset ?? 0) + const limit = options?.limit && options.limit + (options.offset ?? 
0); return { ...options, limit, - } + }; } /** @@ -522,7 +522,7 @@ export function selectsAll( !options?.filter && !options?.limit && !options?.offset - ) + ); } export function createWatcher( @@ -531,37 +531,37 @@ export function createWatcher( keys: KvKey[], fn: (entries: DenoKvEntryMaybe[]) => unknown, ): { - promise: Promise - cancel: () => Promise + promise: Promise; + cancel: () => Promise; } { // Create watch stream - const stream = kv.watch(keys, options) - const reader = stream.getReader() + const stream = kv.watch(keys, options); + const reader = stream.getReader(); // Receive incoming updates const promise = async () => { - let isDone = false + let isDone = false; while (!isDone) { try { - const { value, done } = await reader.read() + const { value, done } = await reader.read(); if (value) { - await fn(value) + await fn(value); } - isDone = done + isDone = done; } catch (_) { - isDone = true + isDone = true; } } - } + }; // Create cancel function async function cancel() { - reader.releaseLock() - await stream.cancel() + reader.releaseLock(); + await stream.cancel(); } - return { promise: promise(), cancel } + return { promise: promise(), cancel }; } /** @@ -573,9 +573,9 @@ export function createWatcher( export function compress(data: Uint8Array) { const buffer = brotliCompressSync(data, { params: { [constants.BROTLI_PARAM_QUALITY]: COMPRESSION_QUALITY_LEVEL }, - }) + }); - return new Uint8Array(buffer) + return new Uint8Array(buffer); } /** @@ -587,9 +587,9 @@ export function compress(data: Uint8Array) { export function decompress(data: Uint8Array) { const buffer = brotliDecompressSync(data, { params: { [constants.BROTLI_PARAM_QUALITY]: COMPRESSION_QUALITY_LEVEL }, - }) + }); - return new Uint8Array(buffer) + return new Uint8Array(buffer); } /** @@ -599,7 +599,7 @@ export function decompress(data: Uint8Array) { * @returns A serialized value. 
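*
* A roundtrip sketch (the object literal is illustrative only); `v8Deserialize()` below reverses
* the operation:
*
* @example
* ```ts
* const bytes = v8Serialize({ count: 10n, createdAt: new Date() });
* const value = v8Deserialize<{ count: bigint; createdAt: Date }>(bytes);
* ```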
*/ export function v8Serialize(value: unknown): Uint8Array { - return _v8Serialize(beforeV8Serialize(value)) + return new Uint8Array(_v8Serialize(beforeV8Serialize(value))); } /** @@ -611,15 +611,15 @@ export function v8Serialize(value: unknown): Uint8Array { export function v8Deserialize( value: Uint8Array, ): T { - return afterV8Serialize(_v8Deserialize(value)) as T + return afterV8Serialize(_v8Deserialize(value)) as T; } export type JSONError = { - message: string - name: string - cause?: string - stack?: string -} + message: string; + name: string; + cause?: string; + stack?: string; +}; export enum TypeKey { Undefined = "__undefined__", @@ -661,19 +661,19 @@ export function beforeV8Serialize(value: unknown): unknown { Object.entries(value as KvObject).map(( [key, val], ) => [key, beforeV8Serialize(val)]), - ) + ); } // Array if (Array.isArray(value)) { - return value.map((val) => beforeV8Serialize(val)) + return value.map((val) => beforeV8Serialize(val)); } // Set if (value instanceof Set) { return new Set( Array.from(value.values()).map((v) => beforeV8Serialize(v)), - ) + ); } // Map @@ -682,10 +682,10 @@ export function beforeV8Serialize(value: unknown): unknown { Array.from(value.entries()).map(( [k, v], ) => [k, beforeV8Serialize(v)]), - ) + ); } - return value + return value; } /** @@ -701,26 +701,26 @@ export function afterV8Serialize(value: unknown): unknown { value === null || typeof value !== "object" ) { - return value + return value; } // KvObject if (isKvObject(value)) { return Object.fromEntries( Object.entries(value).map(([k, v]) => [k, afterV8Serialize(v)]), - ) + ); } // Array if (Array.isArray(value)) { - return value.map((v) => afterV8Serialize(v)) + return value.map((v) => afterV8Serialize(v)); } // Set if (value instanceof Set) { return new Set( Array.from(value.values()).map((v) => afterV8Serialize(v)), - ) + ); } // Map @@ -729,10 +729,10 @@ export function afterV8Serialize(value: unknown): unknown { Array.from(value.entries()).map(( [k, v], ) => [k, afterV8Serialize(v)]), - ) + ); } - return value + return value; } /** @@ -753,8 +753,8 @@ export function afterV8Serialize(value: unknown): unknown { * @returns Serialized value. */ export function jsonSerialize(value: unknown): Uint8Array { - const str = jsonStringify(value) - return new TextEncoder().encode(str) + const str = jsonStringify(value); + return new TextEncoder().encode(str); } /** @@ -777,8 +777,8 @@ export function jsonSerialize(value: unknown): Uint8Array { * @returns Deserialized value. 
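*
* A roundtrip sketch showing the extended types handled by `_replacer()`/`_reviver()`
* (the object literal is illustrative only):
*
* @example
* ```ts
* const bytes = jsonSerialize({
*   count: 10n, // bigint
*   createdAt: new Date(), // Date
*   tags: new Set(["admin", "beta"]), // Set
* });
*
* const value = jsonDeserialize<{
*   count: bigint;
*   createdAt: Date;
*   tags: Set<string>;
* }>(bytes);
* ```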
*/ export function jsonDeserialize(value: Uint8Array): T { - const str = new TextDecoder().decode(value) - return jsonParse(str) + const str = new TextDecoder().decode(value); + return jsonParse(str); } /** @@ -800,7 +800,7 @@ export function jsonDeserialize(value: Uint8Array): T { * @returns */ export function jsonStringify(value: unknown, space?: number | string): string { - return JSON.stringify(_replacer(value), replacer, space) + return JSON.stringify(_replacer(value), replacer, space); } /** @@ -823,7 +823,7 @@ export function jsonStringify(value: unknown, space?: number | string): string { * @returns */ export function jsonParse(value: string): T { - return postReviver(JSON.parse(value, reviver)) as T + return postReviver(JSON.parse(value, reviver)) as T; } /** @@ -834,7 +834,7 @@ export function jsonParse(value: string): T { * @returns */ export function replacer(_key: string, value: unknown) { - return _replacer(value) + return _replacer(value); } /** @@ -845,7 +845,7 @@ export function replacer(_key: string, value: unknown) { * @returns */ export function reviver(_key: string, value: unknown) { - return _reviver(value) + return _reviver(value); } /** @@ -859,47 +859,47 @@ export function _replacer(value: unknown): unknown { if (value === undefined) { return { [TypeKey.Undefined]: false, - } + }; } // NaN if (Number.isNaN(value)) { return { [TypeKey.NaN]: false, - } + }; } // Infinity if (value === Infinity) { return { [TypeKey.Infinity]: false, - } + }; } // bigint if (typeof value === "bigint") { return { [TypeKey.BigInt]: value.toString(), - } + }; } // Date if (value instanceof Date) { return { [TypeKey.Date]: value.toISOString(), - } + }; } // Array if (Array.isArray(value)) { - return value.map(_replacer) + return value.map(_replacer); } // Set if (value instanceof Set) { return { [TypeKey.Set]: Array.from(value.values()).map(_replacer), - } + }; } // Map @@ -908,14 +908,14 @@ export function _replacer(value: unknown): unknown { [TypeKey.Map]: Array.from(value.entries()).map(( [k, v], ) => [k, _replacer(v)]), - } + }; } // RegExp if (value instanceof RegExp) { return { [TypeKey.RegExp]: value.source, - } + }; } // Error @@ -925,118 +925,118 @@ export function _replacer(value: unknown): unknown { name: value.name, stack: value.stack, cause: jsonStringify(value.cause), - } + }; return { [TypeKey.Error]: jsonError, - } + }; } // Int8Array if (value instanceof Int8Array) { return { [TypeKey.Int8Array]: Array.from(value), - } + }; } // Int16Array if (value instanceof Int16Array) { return { [TypeKey.Int16Array]: Array.from(value), - } + }; } // Int32Array if (value instanceof Int32Array) { return { [TypeKey.Int32Array]: Array.from(value), - } + }; } // BigInt64Array if (value instanceof BigInt64Array) { return { [TypeKey.BigInt64Array]: Array.from(value), - } + }; } // Uint8Array if (value instanceof Uint8Array) { return { [TypeKey.Uint8Array]: Array.from(value), - } + }; } // Uint16Array if (value instanceof Uint16Array) { return { [TypeKey.Uint16Array]: Array.from(value), - } + }; } // Uint32Array if (value instanceof Uint32Array) { return { [TypeKey.Uint32Array]: Array.from(value), - } + }; } // BigUint64Array if (value instanceof BigUint64Array) { return { [TypeKey.BigUint64Array]: Array.from(value), - } + }; } // Uint8ClampedArray if (value instanceof Uint8ClampedArray) { return { [TypeKey.Uint8ClampedArray]: Array.from(value), - } + }; } // Float16Array if (value instanceof Float16Array) { return { [TypeKey.Float16Array]: Array.from(value), - } + }; } // Float32Array if 
(value instanceof Float32Array) { return { [TypeKey.Float32Array]: Array.from(value), - } + }; } // Float64Array if (value instanceof Float64Array) { return { [TypeKey.Float64Array]: Array.from(value), - } + }; } // ArrayBuffer if (value instanceof ArrayBuffer) { return { [TypeKey.ArrayBuffer]: Array.from(new Uint8Array(value)), - } + }; } // DataView if (value instanceof DataView) { return { [TypeKey.DataView]: Array.from(new Uint8Array(value.buffer)), - } + }; } // KvObject if (isKvObject(value)) { return Object.fromEntries( Object.entries(value as KvObject).map(([k, v]) => [k, _replacer(v)]), - ) + ); } - return value + return value; } /** @@ -1052,32 +1052,32 @@ export function _reviver(value: unknown): unknown { value === undefined || typeof value !== "object" ) { - return value + return value; } // bigint if (TypeKey.BigInt in value) { - return BigInt(mapValue(TypeKey.BigInt, value)) + return BigInt(mapValue(TypeKey.BigInt, value)); } // Date if (TypeKey.Date in value) { - return new Date(mapValue(TypeKey.Date, value)) + return new Date(mapValue(TypeKey.Date, value)); } // NaN if (TypeKey.NaN in value) { - return NaN + return NaN; } // Infnity if (TypeKey.Infinity in value) { - return Infinity + return Infinity; } // RegExp if (TypeKey.RegExp in value) { - return new RegExp(mapValue(TypeKey.RegExp, value)) + return new RegExp(mapValue(TypeKey.RegExp, value)); } // Error @@ -1085,113 +1085,113 @@ export function _reviver(value: unknown): unknown { const { message, stack, cause, ...rest } = mapValue( TypeKey.Error, value, - ) + ); const error = new Error(message, { cause: cause ? jsonParse(cause) : undefined, ...rest, - }) + }); - error.stack = stack - return error + error.stack = stack; + return error; } // Int8Array if (TypeKey.Int8Array in value) { - return Int8Array.from(mapValue(TypeKey.Int8Array, value)) + return Int8Array.from(mapValue(TypeKey.Int8Array, value)); } // Int16Array if (TypeKey.Int16Array in value) { - return Int16Array.from(mapValue(TypeKey.Int16Array, value)) + return Int16Array.from(mapValue(TypeKey.Int16Array, value)); } // Int32Array if (TypeKey.Int32Array in value) { - return Int32Array.from(mapValue(TypeKey.Int32Array, value)) + return Int32Array.from(mapValue(TypeKey.Int32Array, value)); } // BigInt64Array if (TypeKey.BigInt64Array in value) { - return BigInt64Array.from(mapValue(TypeKey.BigInt64Array, value)) + return BigInt64Array.from(mapValue(TypeKey.BigInt64Array, value)); } // Uint8Array if (TypeKey.Uint8Array in value) { - return Uint8Array.from(mapValue(TypeKey.Uint8Array, value)) + return Uint8Array.from(mapValue(TypeKey.Uint8Array, value)); } // Uint16Array if (TypeKey.Uint16Array in value) { - return Uint16Array.from(mapValue(TypeKey.Uint16Array, value)) + return Uint16Array.from(mapValue(TypeKey.Uint16Array, value)); } // Uint32Array if (TypeKey.Uint32Array in value) { - return Uint32Array.from(mapValue(TypeKey.Uint32Array, value)) + return Uint32Array.from(mapValue(TypeKey.Uint32Array, value)); } // BigUint64Array if (TypeKey.BigUint64Array in value) { - return BigUint64Array.from(mapValue(TypeKey.BigUint64Array, value)) + return BigUint64Array.from(mapValue(TypeKey.BigUint64Array, value)); } // Uint8ClampedArray if (TypeKey.Uint8ClampedArray in value) { - return Uint8ClampedArray.from(mapValue(TypeKey.Uint8ClampedArray, value)) + return Uint8ClampedArray.from(mapValue(TypeKey.Uint8ClampedArray, value)); } // Float16Array if (TypeKey.Float16Array in value) { - return Float16Array.from(mapValue(TypeKey.Float16Array, value)) + return 
Float16Array.from(mapValue(TypeKey.Float16Array, value)); } // Float32Array if (TypeKey.Float32Array in value) { - return Float32Array.from(mapValue(TypeKey.Float32Array, value)) + return Float32Array.from(mapValue(TypeKey.Float32Array, value)); } // Float64Array if (TypeKey.Float64Array in value) { - return Float64Array.from(mapValue(TypeKey.Float64Array, value)) + return Float64Array.from(mapValue(TypeKey.Float64Array, value)); } // ArrayBuffer if (TypeKey.ArrayBuffer in value) { - const uint8array = Uint8Array.from(mapValue(TypeKey.ArrayBuffer, value)) - return uint8array.buffer + const uint8array = Uint8Array.from(mapValue(TypeKey.ArrayBuffer, value)); + return uint8array.buffer; } // DataView if (TypeKey.DataView in value) { - const uint8array = Uint8Array.from(mapValue(TypeKey.DataView, value)) - return new DataView(uint8array.buffer) + const uint8array = Uint8Array.from(mapValue(TypeKey.DataView, value)); + return new DataView(uint8array.buffer); } // Set if (TypeKey.Set in value) { - return new Set(mapValue>(TypeKey.Set, value)) + return new Set(mapValue>(TypeKey.Set, value)); } // Map if (TypeKey.Map in value) { - return new Map(mapValue>(TypeKey.Map, value)) + return new Map(mapValue>(TypeKey.Map, value)); } // Array if (Array.isArray(value)) { - return value.map(_reviver) + return value.map(_reviver); } // KvObject if (isKvObject(value)) { return Object.fromEntries( Object.entries(value).map(([k, v]) => [k, _reviver(v)]), - ) + ); } // Return value - return value + return value; } /** @@ -1207,41 +1207,41 @@ export function postReviver(value: T): T { value === null || typeof value !== "object" ) { - return value + return value; } // undefined if (TypeKey.Undefined in value) { - return undefined as T + return undefined as T; } // Array if (Array.isArray(value)) { - return value.map(postReviver) as T + return value.map(postReviver) as T; } // Set if (value instanceof Set) { return new Set( Array.from(value.values()).map(postReviver), - ) as T + ) as T; } // Map if (value instanceof Map) { return new Map( Array.from(value.entries()).map(([k, v]) => [k, postReviver(v)]), - ) as T + ) as T; } // KvObject if (isKvObject(value)) { return Object.fromEntries( Object.entries(value).map(([k, v]) => [k, postReviver(v)]), - ) as T + ) as T; } - return value + return value; } /** @@ -1252,5 +1252,5 @@ export function postReviver(value: T): T { * @returns Mapped value. 
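*
* A minimal sketch mirroring how `_reviver()` uses it; the payload shape matches what
* `_replacer()` produces for a `Uint8Array`:
*
* @example
* ```ts
* const payload = { [TypeKey.Uint8Array]: [1, 2, 3] };
* const entries = mapValue(TypeKey.Uint8Array, payload); // [1, 2, 3]
* ```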
*/ export function mapValue(key: string, value: unknown) { - return (value as Record)[key] + return (value as Record)[key]; } diff --git a/tests/collection/add.test.ts b/tests/collection/add.test.ts index 38f06e2..b58c416 100644 --- a/tests/collection/add.test.ts +++ b/tests/collection/add.test.ts @@ -1,42 +1,42 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - add", async (t) => { await t.step("Should add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.users.add(mockUser1) - assert(cr.ok) + const cr = await db.users.add(mockUser1); + assert(cr.ok); - const doc = await db.users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step( "Should successfully parse and add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.z_users.add(mockUser1) - assert(cr.ok) + const cr = await db.z_users.add(mockUser1); + assert(cr.ok); - const doc = await db.z_users.find(cr.id) + const doc = await db.z_users.find(cr.id); - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should fail parse and add new document entry to collection", async () => { await useDb(async (db) => { - let assertion = false - await db.z_users.add(mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) + let assertion = false; + await db.z_users.add(mockUserInvalid).catch(() => assertion = true); + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/addMany.test.ts b/tests/collection/addMany.test.ts index 087fb0e..bcb664c 100644 --- a/tests/collection/addMany.test.ts +++ b/tests/collection/addMany.test.ts @@ -1,57 +1,57 @@ -import { assert } from "../test.deps.ts" -import { generateInvalidUsers, generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateInvalidUsers, generateUsers, useDb } from "../utils.ts"; Deno.test("collection - addMany", async (t) => { await t.step( "Should successfully add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const { result } = await db.users.getMany() + const { result } = await db.users.getMany(); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should successfully parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.z_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.z_users.addMany(users); + assert(cr.ok); - const { result } = await db.z_users.getMany() - assert(result.length === users.length) + const { result } = await 
db.z_users.getMany(); + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should fail to parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateInvalidUsers(1_000) - let assertion = false + const users = generateInvalidUsers(1_000); + let assertion = false; - await db.z_users.addMany(users).catch(() => assertion = true) + await db.z_users.addMany(users).catch(() => assertion = true); - assert(assertion) - }) + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/count.test.ts b/tests/collection/count.test.ts index 80d24f7..d900eec 100644 --- a/tests/collection/count.test.ts +++ b/tests/collection/count.test.ts @@ -1,22 +1,22 @@ -import { assert } from "../test.deps.ts" -import { generateUsers } from "../utils.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers } from "../utils.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - count", async (t) => { await t.step( "Should correctly count total number of documents in the collection", async () => { await useDb(async (db) => { - const count1 = await db.users.count() - assert(count1 === 0) + const count1 = await db.users.count(); + assert(count1 === 0); - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const count2 = await db.users.count() - assert(count2 === users.length) - }) + const count2 = await db.users.count(); + assert(count2 === users.length); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/delete.test.ts b/tests/collection/delete.test.ts index f5238a1..6a2e03a 100644 --- a/tests/collection/delete.test.ts +++ b/tests/collection/delete.test.ts @@ -1,47 +1,47 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("collection - delete", async (t) => { await t.step( "Should successfully delete a document from the collection", async () => { await useDb(async (db) => { - const cr = await db.users.add(mockUser1) - const count1 = await db.users.count() + const cr = await db.users.add(mockUser1); + const count1 = await db.users.count(); - assert(cr.ok) - assert(count1 === 1) + assert(cr.ok); + assert(count1 === 1); - await db.users.delete(cr.id) + await db.users.delete(cr.id); - const count2 = await db.users.count() - const doc = await db.users.find(cr.id) + const count2 = await db.users.count(); + const doc = await db.users.find(cr.id); - assert(count2 === 0) - assert(doc === null) - }) + assert(count2 === 0); + assert(doc === null); + }); }, - ) + ); await t.step( "Should successfully delete 1000 documents from the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const crs = await db.users.addMany(users) - const count1 = await db.users.count() - const { result: ids } = await db.users.map((doc) => doc.id) + const users = generateUsers(1_000); + const crs = await db.users.addMany(users); + const count1 = await db.users.count(); + const { result: ids } = await db.users.map((doc) => doc.id); - assert(crs.ok) - assert(count1 === users.length) - 
assert(ids.length === users.length) + assert(crs.ok); + assert(count1 === users.length); + assert(ids.length === users.length); - await db.users.delete(...ids) + await db.users.delete(...ids); - const count2 = await db.users.count() - assert(count2 === 0) - }) + const count2 = await db.users.count(); + assert(count2 === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/deleteMany.test.ts b/tests/collection/deleteMany.test.ts index f938b65..15a0501 100644 --- a/tests/collection/deleteMany.test.ts +++ b/tests/collection/deleteMany.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("collection - deleteMany", async (t) => { await t.step("Should delete all documents from the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const count1 = await db.users.count() - assert(count1 === users.length) + const count1 = await db.users.count(); + assert(count1 === users.length); - await db.users.deleteMany() + await db.users.deleteMany(); - const count2 = await db.users.count() - assert(count2 === 0) - }) - }) -}) + const count2 = await db.users.count(); + assert(count2 === 0); + }); + }); +}); diff --git a/tests/collection/enqueue.test.ts b/tests/collection/enqueue.test.ts index 6f19eae..1885919 100644 --- a/tests/collection/enqueue.test.ts +++ b/tests/collection/enqueue.test.ts @@ -4,100 +4,100 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { createResolver, useDb, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { createResolver, useDb, useKv } from "../utils.ts"; Deno.test("collection - enqueue", async (t) => { await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); const db = kvdex(kv, { numbers: collection(model()), - }) + }); - const handlerId = createHandlerId(db.numbers._keys.base, undefined) + const handlerId = createHandlerId(db.numbers._keys.base, undefined); - let assertion = false + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - sleeper.resolve() - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + sleeper.resolve(); + }); await db.numbers.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.numbers.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should enqueue message in correct 
topic", async () => { await useDb(async (db) => { - const data = "data" - const undeliveredId = "undelivered" - const topic = "topic" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const topic = "topic"; + const sleeper = createResolver(); - let assertion1 = false - let assertion2 = true + let assertion1 = false; + let assertion2 = true; const l1 = db.users.listenQueue(() => { - assertion1 = true - sleeper.resolve() - }, { topic }) + assertion1 = true; + sleeper.resolve(); + }, { topic }); - const l2 = db.users.listenQueue(() => assertion2 = false) + const l2 = db.users.listenQueue(() => assertion2 = false); await db.users.enqueue("data", { idsIfUndelivered: [undeliveredId], topic, - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.users.findUndelivered(undeliveredId) - assert(assertion1 || typeof undelivered?.value === typeof data) - assert(assertion2) + const undelivered = await db.users.findUndelivered(undeliveredId); + assert(assertion1 || typeof undelivered?.value === typeof data); + assert(assertion2); - return async () => await Promise.all([l1, l2]) - }) - }) + return async () => await Promise.all([l1, l2]); + }); + }); await t.step("Should enqueue message with undefined data", async () => { await useDb(async (db) => { - const data = undefined - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = undefined; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); - let assertion = false + let assertion = false; const listener = db.users.listenQueue((msg) => { - assertion = msg === data - sleeper.resolve() - }) + assertion = msg === data; + sleeper.resolve(); + }); await db.users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/collection/find.test.ts b/tests/collection/find.test.ts index 9a4fdcc..b45e4d0 100644 --- a/tests/collection/find.test.ts +++ b/tests/collection/find.test.ts @@ -1,25 +1,25 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - find", async (t) => { await t.step("Should find document by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.users.set(id, mockUser1) - assert(cr.ok) + const cr = await db.users.set(id, mockUser1); + assert(cr.ok); - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step("Should not find document by non-existing id", async () => { await useDb(async (db) => { - const doc = await db.users.find("123") - assert(doc === null) - }) - }) -}) + const doc = await db.users.find("123"); + assert(doc === null); + }); + }); +}); diff --git a/tests/collection/findMany.test.ts 
b/tests/collection/findMany.test.ts index a517d72..dbc1e64 100644 --- a/tests/collection/findMany.test.ts +++ b/tests/collection/findMany.test.ts @@ -1,32 +1,32 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("collection - findMany", async (t) => { await t.step("Should find all documents", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const { result: ids } = await db.users.map((doc) => doc.id) - const docs = await db.users.findMany(ids) - assert(docs.length === users.length) + const { result: ids } = await db.users.map((doc) => doc.id); + const docs = await db.users.findMany(ids); + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should not find any documents", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + assert(cr.ok); - const docs = await db.users.findMany(["", "", ""]) - assert(docs.length === 0) - }) - }) -}) + const docs = await db.users.findMany(["", "", ""]); + assert(docs.length === 0); + }); + }); +}); diff --git a/tests/collection/forEach.test.ts b/tests/collection/forEach.test.ts index 5972c8f..4f7c394 100644 --- a/tests/collection/forEach.test.ts +++ b/tests/collection/forEach.test.ts @@ -1,28 +1,28 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateUsers } from "../utils.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateUsers } from "../utils.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - forEach", async (t) => { await t.step( "Should run callback function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const docs: Document[] = [] - await db.users.forEach((doc) => docs.push(doc)) + const docs: Document[] = []; + await db.users.forEach((doc) => docs.push(doc)); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/getMany.test.ts b/tests/collection/getMany.test.ts index 284cbf8..0808a31 100644 --- a/tests/collection/getMany.test.ts +++ b/tests/collection/getMany.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("collection - getMany", async (t) => { await t.step("Should get all documents", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) 
- assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const { result } = await db.users.getMany() - assert(result.length === users.length) + const { result } = await db.users.getMany(); + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) - }) -}) + ); + }); + }); +}); diff --git a/tests/collection/getOne.test.ts b/tests/collection/getOne.test.ts index 2fb7c52..e541f0c 100644 --- a/tests/collection/getOne.test.ts +++ b/tests/collection/getOne.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("collection - getOne", async (t) => { await t.step("Should get only one document", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const doc = await db.users.getOne() - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) -}) + const doc = await db.users.getOne(); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); +}); diff --git a/tests/collection/history.test.ts b/tests/collection/history.test.ts index 56a3262..7bac1e6 100644 --- a/tests/collection/history.test.ts +++ b/tests/collection/history.test.ts @@ -1,8 +1,8 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { sleep, useKv } from "../utils.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { sleep, useKv } from "../utils.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; Deno.test("collection - history", async (t) => { await t.step( @@ -11,27 +11,27 @@ Deno.test("collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); + + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username 
=== mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts in correct order after deleting", @@ -39,35 +39,35 @@ Deno.test("collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - - const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "write") - assert(h4.value.username === mockUser3.username) - assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()) - assert(h5.type === "delete") - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + + const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "write"); + assert(h4.value.username === mockUser3.username); + assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()); + assert(h5.type === "delete"); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts and updates in correct order", @@ -75,27 +75,27 @@ Deno.test("collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.update(id, mockUser2) - await sleep(10) - await db.users.update(id, mockUser3) - - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.update(id, mockUser2); + await 
sleep(10); + await db.users.update(id, mockUser3); + + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist version history of insert and delete by deleteMany()", @@ -103,30 +103,30 @@ Deno.test("collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.deleteMany() - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.deleteMany({ filter: () => true }) - - const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "delete") - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.deleteMany(); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.deleteMany({ filter: () => true }); + + const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "delete"); + }); }, - ) + ); await t.step( "Should not find history", @@ -134,83 +134,83 @@ Deno.test("collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model()), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.update(id, mockUser2) - await db.users.delete(id) - await db.users.deleteMany() + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.update(id, mockUser2); + await db.users.delete(id); + await db.users.deleteMany(); - const { result: history } = await db.users.findHistory(id) - assert(history.length === 0) - }) + const { result: history } = await db.users.findHistory(id); + assert(history.length === 0); + }); }, - ) + ); await t.step("Should find filtered history", async () => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.delete(id) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.delete(id); + await 
db.users.set(id, mockUser2, { overwrite: true }); + await db.users.update(id, mockUser3); const { result: history1 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "delete", - }) + }); const { result: history2 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "write" && entry.value.age === mockUser1.age, - }) + }); - assert(history1.length === 1) - assert(history2.length === 2) + assert(history1.length === 1); + assert(history2.length === 2); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser1.username ), - ) + ); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser2.username ), - ) - }) - }) + ); + }); + }); await t.step("Should delete all document history", async () => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model(), { history: true }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.set(id, mockUser3, { overwrite: true }) - const cr = await db.users.add(mockUser1) - - assert(cr.ok) - - const { result: history1_1 } = await db.users.findHistory(id) - const { result: history1_2 } = await db.users.findHistory(cr.id) - assert(history1_1.length === 3) - assert(history1_2.length === 1) - - await db.users.deleteHistory(id) - - const { result: history2_1 } = await db.users.findHistory(id) - const { result: history2_2 } = await db.users.findHistory(cr.id) - assert(history2_1.length === 0) - assert(history2_2.length === 1) - }) - }) -}) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.set(id, mockUser3, { overwrite: true }); + const cr = await db.users.add(mockUser1); + + assert(cr.ok); + + const { result: history1_1 } = await db.users.findHistory(id); + const { result: history1_2 } = await db.users.findHistory(cr.id); + assert(history1_1.length === 3); + assert(history1_2.length === 1); + + await db.users.deleteHistory(id); + + const { result: history2_1 } = await db.users.findHistory(id); + const { result: history2_2 } = await db.users.findHistory(cr.id); + assert(history2_1.length === 0); + assert(history2_2.length === 1); + }); + }); +}); diff --git a/tests/collection/listenQueue.test.ts b/tests/collection/listenQueue.test.ts index 0cf9278..0532950 100644 --- a/tests/collection/listenQueue.test.ts +++ b/tests/collection/listenQueue.test.ts @@ -4,40 +4,40 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" +} from "../../mod.ts"; import { KVDEX_KEY_PREFIX, UNDELIVERED_KEY_PREFIX, -} from "../../src/constants.ts" -import { createHandlerId, extendKey } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { createResolver, sleep, useKv } from "../utils.ts" +} from "../../src/constants.ts"; +import { createHandlerId, extendKey } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { createResolver, sleep, useKv } from "../utils.ts"; Deno.test("collection - listenQueue", async (t) => { await t.step("Should receive message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "id" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "id"; + const sleeper = createResolver(); const db = kvdex(kv, { numbers: collection(model()), - }) + }); - const handlerId = createHandlerId(db.numbers._keys.base, undefined) 
+ const handlerId = createHandlerId(db.numbers._keys.base, undefined); - let assertion = false + let assertion = false; const listener = db.numbers.listenQueue((msgData) => { - assertion = msgData === data - sleeper.resolve() - }) + assertion = msgData === data; + sleeper.resolve(); + }); const msg: QueueMessage = { __is_undefined__: false, __handlerId__: handlerId, __data__: data, - } + }; await kv.enqueue(msg, { keysIfUndelivered: [ @@ -47,36 +47,36 @@ Deno.test("collection - listenQueue", async (t) => { undeliveredId, ), ], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.numbers.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should not receive db queue message", async () => { await useKv(async (kv) => { const db = kvdex(kv, { numbers: collection(model()), - }) + }); - let assertion = true + let assertion = true; const listener = db.numbers.listenQueue(() => { - assertion = false - }) + assertion = false; + }); - await db.enqueue("data") + await db.enqueue("data"); - await sleep(500) + await sleep(500); - assert(assertion) + assert(assertion); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/collection/map.test.ts b/tests/collection/map.test.ts index 2a86159..9ed432b 100644 --- a/tests/collection/map.test.ts +++ b/tests/collection/map.test.ts @@ -1,24 +1,24 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("collection - map", async (t) => { await t.step( "Should run callback mapper function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const { result } = await db.users.map((doc) => doc.value.username) + const { result } = await db.users.map((doc) => doc.value.username); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((username) => username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/properties.test.ts b/tests/collection/properties.test.ts index 1c0dd52..e535a7a 100644 --- a/tests/collection/properties.test.ts +++ b/tests/collection/properties.test.ts @@ -1,23 +1,23 @@ -import { collection, type Document, kvdex, model } from "../../mod.ts" -import { ID_KEY_PREFIX, KVDEX_KEY_PREFIX } from "../../src/constants.ts" -import { extendKey, keyEq } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { generateUsers, useDb, useKv } from "../utils.ts" -import { sleep } from "../utils.ts" +import { collection, type Document, kvdex, model } from "../../mod.ts"; +import { ID_KEY_PREFIX, KVDEX_KEY_PREFIX } from "../../src/constants.ts"; +import { extendKey, keyEq } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, 
mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { generateUsers, useDb, useKv } from "../utils.ts"; +import { sleep } from "../utils.ts"; Deno.test("collection - properties", async (t) => { await t.step("Keys should have the correct prefixes", async () => { await useDb((db) => { - const baseKey = db.users._keys.base - const idKey = db.users._keys.id - const prefix = extendKey([KVDEX_KEY_PREFIX], "users") + const baseKey = db.users._keys.base; + const idKey = db.users._keys.id; + const prefix = extendKey([KVDEX_KEY_PREFIX], "users"); - assert(keyEq(baseKey, prefix)) - assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))) - }) - }) + assert(keyEq(baseKey, prefix)); + assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))); + }); + }); await t.step("Should generate ids with custom id generator", async () => { await useKv((kv) => { @@ -31,254 +31,254 @@ Deno.test("collection - properties", async (t) => { username: "primary", }, }), - }) + }); - const id1 = db.users1._idGenerator(mockUser1) - const id2 = db.users2._idGenerator(mockUser1) + const id1 = db.users1._idGenerator(mockUser1); + const id2 = db.users2._idGenerator(mockUser1); - assert(typeof id1 === "number") - assert(id2 === mockUser1.username) - }) - }) + assert(typeof id1 === "number"); + assert(id2 === mockUser1.username); + }); + }); await t.step("Should select using cursor pagination", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - let cursor: string | undefined = undefined + const selected: Document[] = []; + let cursor: string | undefined = undefined; do { const query = await db.users.getMany({ cursor, limit: users.length / 10, - }) + }); - selected.push(...query.result) - cursor = query.cursor - } while (cursor) + selected.push(...query.result); + cursor = query.cursor; + } while (cursor); assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select using offset pagination", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - const limit = 50 + const selected: Document[] = []; + const limit = 50; for (let offset = 0; offset < users.length; offset += limit) { - const { result } = await db.users.getMany({ offset, limit }) - selected.push(...result) - assert(result.length === 50) + const { result } = await db.users.getMany({ offset, limit }); + selected.push(...result); + assert(result.length === 50); } assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select filtered", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - const count1 = await db.users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + const count1 = await db.users.count(); + assert(cr.ok); + assert(count1 === users.length); - const sliced = users.slice(5, 7) + const sliced = users.slice(5, 7); const { result } = await db.users.getMany({ filter: (doc) => 
sliced.map((user) => user.username).includes( doc.value.username, ), - }) + }); - assert(result.length === sliced.length) + assert(result.length === sliced.length); assert( result.every((doc) => sliced.some((user) => user.username === doc.value.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select in reverse", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - const count1 = await db.users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + const count1 = await db.users.count(); + assert(cr.ok); + assert(count1 === users.length); - const query1 = await db.users.getMany() - const query2 = await db.users.getMany({ reverse: true }) + const query1 = await db.users.getMany(); + const query2 = await db.users.getMany({ reverse: true }); assert( JSON.stringify(query1.result) === JSON.stringify(query2.result.reverse()), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - const count1 = await db.users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + const count1 = await db.users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.users.getMany() + const query1 = await db.users.getMany(); const query2 = await db.users.getMany({ startId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(index).length) + assert(query2.result.length === query1.result.slice(index).length); assert( query2.result.every((doc1) => query1.result.slice(index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select until end id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - const count1 = await db.users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + const count1 = await db.users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.users.getMany() + const query1 = await db.users.getMany(); const query2 = await db.users.getMany({ endId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(0, index).length) + assert(query2.result.length === query1.result.slice(0, index).length); assert( query2.result.every((doc1) => query1.result.slice(0, index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id to end id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.users.addMany(users) - const count1 = await db.users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.users.addMany(users); + const count1 = await db.users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index1 = 5 - const index2 = 7 + const index1 = 5; + const index2 = 7; - const query1 = await db.users.getMany() + const query1 = await db.users.getMany(); const query2 = await db.users.getMany({ startId: query1.result.at(index1)?.id, endId: 
query1.result.at(index2)?.id, - }) + }); assert( query2.result.length === query1.result.slice(index1, index2).length, - ) + ); assert( query2.result.every((doc1) => query1.result.slice(index1, index2).some((doc2) => doc1.id === doc2.id ) ), - ) - }) - }) + ); + }); + }); await t.step("Should select limited by database reads", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) - await sleep(10) - const cr3 = await db.users.add(mockUser3) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); + await sleep(10); + const cr3 = await db.users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.users.getMany({ limit: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.every((doc) => doc.value.username === mockUser2.username)) - }) - }) + assert(result.every((doc) => doc.value.username === mockUser2.username)); + }); + }); await t.step("Should select limited by result count", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) - await sleep(10) - const cr3 = await db.users.add(mockUser3) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); + await sleep(10); + const cr3 = await db.users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.users.getMany({ take: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.length === 2) - assert(result.some((doc) => doc.value.username === mockUser2.username)) - assert(result.some((doc) => doc.value.username === mockUser3.username)) - }) - }) + assert(result.length === 2); + assert(result.some((doc) => doc.value.username === mockUser2.username)); + assert(result.some((doc) => doc.value.username === mockUser3.username)); + }); + }); await t.step("Should correctly infer type of document", async () => { await useDb(async (db) => { - const doc = await db.users.find("") + const doc = await db.users.find(""); if (doc) { - doc.value.age.valueOf() + doc.value.age.valueOf(); } - }) - }) + }); + }); await t.step( "Should correctly infer insert and output of asymmetric model", async () => { await useDb(async (db) => { - const cr = await db.a_users.add(mockUser1) - assert(cr.ok) - - const doc = await db.a_users.find(cr.id) - assert(doc !== null) - assert(typeof doc.value.addressStr === "string") - assert(typeof doc.value.decadeAge === "number") - assert(typeof doc.value.name === "string") - }) + const cr = await db.a_users.add(mockUser1); + assert(cr.ok); + + const doc = await db.a_users.find(cr.id); + assert(doc !== null); + assert(typeof doc.value.addressStr === "string"); + assert(typeof doc.value.decadeAge === "number"); + assert(typeof doc.value.name === "string"); + }); }, - ) + ); await t.step("Should successfully generate id asynchronously", async () => { await useKv(async (kv) => { @@ -288,23 +288,23 @@ Deno.test("collection - properties", async (t) => { const buffer = await crypto.subtle.digest( "SHA-256", new ArrayBuffer(user.age), - ) - return Math.random() * buffer.byteLength + ); + return Math.random() * buffer.byteLength; }, }), - }) + }); - const cr1 = await db.test.add(mockUser1) - const cr2 = 
await db.atomic((s) => s.test).add(mockUser2).commit() + const cr1 = await db.test.add(mockUser1); + const cr2 = await db.atomic((s) => s.test).add(mockUser2).commit(); const doc2 = await db.test.getOne({ filter: (doc) => doc.value.username === mockUser2.username, - }) - - assert(cr1.ok) - assert(typeof cr1.id === "number") - assert(cr2.ok) - assert(doc2 !== null) - assert(typeof doc2.id === "number") - }) - }) -}) + }); + + assert(cr1.ok); + assert(typeof cr1.id === "number"); + assert(cr2.ok); + assert(doc2 !== null); + assert(typeof doc2.id === "number"); + }); + }); +}); diff --git a/tests/collection/set.test.ts b/tests/collection/set.test.ts index e0b966f..6aa2baa 100644 --- a/tests/collection/set.test.ts +++ b/tests/collection/set.test.ts @@ -1,68 +1,68 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - set", async (t) => { await t.step("Should set new document entry in collection", async () => { await useDb(async (db) => { - const cr = await db.users.set("id", mockUser1) - assert(cr.ok) + const cr = await db.users.set("id", mockUser1); + assert(cr.ok); - const doc = await db.users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step( "Should not set new document entry in collection with colliding id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.users.set(id, mockUser1); + assert(cr1.ok); - const cr2 = await db.users.set(id, mockUser2) - assert(!cr2.ok) + const cr2 = await db.users.set(id, mockUser2); + assert(!cr2.ok); - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should overwrite document in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.users.set("id", mockUser1) - assert(cr1.ok) + const cr1 = await db.users.set("id", mockUser1); + assert(cr1.ok); - const cr2 = await db.users.set("id", mockUser2, { overwrite: true }) - assert(cr2.ok) + const cr2 = await db.users.set("id", mockUser2, { overwrite: true }); + assert(cr2.ok); - const doc = await db.users.find("id") - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) + const doc = await db.users.find("id"); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); }, - ) + ); await t.step("Should successfully parse and set document", async () => { await useDb(async (db) => { - let assertion = true - await db.z_users.set("id", mockUser1).catch(() => assertion = false) - assert(assertion) - }) - }) + let assertion = true; + await db.z_users.set("id", mockUser1).catch(() => assertion = false); + assert(assertion); + }); + }); await t.step("Should fail to parse and set document", async () => { await useDb(async (db) => { - let assertion = false - await db.z_users.set("id", mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) - }) 
-}) + let assertion = false; + await db.z_users.set("id", mockUserInvalid).catch(() => assertion = true); + assert(assertion); + }); + }); +}); diff --git a/tests/collection/types.test.ts b/tests/collection/types.test.ts index fada885..326f194 100644 --- a/tests/collection/types.test.ts +++ b/tests/collection/types.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { useKv } from "../utils.ts" -import { VALUES } from "../values.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { useKv } from "../utils.ts"; +import { VALUES } from "../values.ts"; Deno.test("collection - types", async (t) => { await t.step( @@ -13,19 +13,19 @@ Deno.test("collection - types", async (t) => { val, i, ) => [i, collection(model())]), - ) + ); - const db = kvdex(kv, schema) + const db = kvdex(kv, schema); - const crs = await Promise.all(VALUES.map((val, i) => db[i].add(val))) - assert(crs.every((cr) => cr.ok)) + const crs = await Promise.all(VALUES.map((val, i) => db[i].add(val))); + assert(crs.every((cr) => cr.ok)); await Promise.all( VALUES.map((_, i) => db[i].forEach((doc) => assertEquals(doc.value, VALUES[i])) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/update.test.ts b/tests/collection/update.test.ts index 6ed202e..af59bca 100644 --- a/tests/collection/update.test.ts +++ b/tests/collection/update.test.ts @@ -1,15 +1,15 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { useDb, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import { useDb, useKv } from "../utils.ts"; Deno.test("collection - update", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.users.add(mockUser1) - assert(cr.ok) + const cr = await db.users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -17,35 +17,35 @@ Deno.test("collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.users.update(cr.id, updateData, { strategy: "merge-shallow", - }) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - - const doc = await db.users.find(cr.id) - - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) + }); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + + const doc = await db.users.find(cr.id); + + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street 
=== "undefined"); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.users.add(mockUser1) - assert(cr.ok) + const cr = await db.users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -53,56 +53,56 @@ Deno.test("collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.users.update(cr.id, updateData, { strategy: "merge", - }) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - - const doc = await db.users.find(cr.id) - - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + + const doc = await db.users.find(cr.id); + + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.users.add(mockUser1) - assert(cr.ok) + const cr = await db.users.add(mockUser1); + assert(cr.ok); const updateCr = await db.users.update(cr.id, mockUser2, { strategy: "replace", - }) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - - const doc = await db.users.find(cr.id) - - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - assert(doc.value.age === mockUser2.age) - assert(doc.value.address.country === mockUser2.address.country) - assert(doc.value.address.city === mockUser2.address.city) - assert(doc.value.address.houseNr === mockUser2.address.houseNr) - assert(doc.value.address.street === mockUser2.address.street) - }) + }); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + + const doc = await db.users.find(cr.id); + + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + assert(doc.value.age === mockUser2.age); + assert(doc.value.address.country === mockUser2.address.country); + assert(doc.value.address.city === mockUser2.address.city); + assert(doc.value.address.houseNr === mockUser2.address.houseNr); + assert(doc.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update documents of type Array, Set and Map using merge", @@ -112,57 +112,57 @@ Deno.test("collection - update", async (t) => { arrays: collection(model()), sets: collection(model>()), maps: collection(model>()), - }) + }); - const val1 = [1, 2, 4] - const setEntries = [1, 2, 4] - const val2 = new Set(setEntries) - const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const - const val3 = new Map(mapEntries) + const val1 = [1, 2, 4]; + const setEntries = [1, 2, 4]; + const val2 = new Set(setEntries); + const mapEntries = 
[["1", 1], ["2", 2], ["4", 4]] as const; + const val3 = new Map(mapEntries); - const cr1 = await db.arrays.add(val1) - const cr2 = await db.sets.add(val2) - const cr3 = await db.maps.add(val3) + const cr1 = await db.arrays.add(val1); + const cr2 = await db.sets.add(val2); + const cr3 = await db.maps.add(val3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const u1 = [1, 3, 5] - const uSetEntries = [1, 3, 5] - const u2 = new Set(uSetEntries) - const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const - const u3 = new Map(uMapEntries) + const u1 = [1, 3, 5]; + const uSetEntries = [1, 3, 5]; + const u2 = new Set(uSetEntries); + const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const; + const u3 = new Map(uMapEntries); const updateCr1 = await db.arrays.update(cr1.id, u1, { strategy: "merge", - }) + }); const updateCr2 = await db.sets.update(cr2.id, u2, { strategy: "merge", - }) + }); const updateCr3 = await db.maps.update(cr3.id, u3, { strategy: "merge", - }) - - assert(updateCr1.ok) - assert(updateCr2.ok) - assert(updateCr3.ok) - - const doc1 = await db.arrays.find(cr1.id) - const doc2 = await db.sets.find(cr2.id) - const doc3 = await db.maps.find(cr3.id) - - assert(doc1 !== null) - assert(doc2 !== null) - assert(doc3 !== null) - assertEquals(doc1.value, [...val1, ...u1]) - assertEquals(doc2.value, new Set([...setEntries, ...uSetEntries])) - assertEquals(doc3.value, new Map([...mapEntries, ...uMapEntries])) - }) + }); + + assert(updateCr1.ok); + assert(updateCr2.ok); + assert(updateCr3.ok); + + const doc1 = await db.arrays.find(cr1.id); + const doc2 = await db.sets.find(cr2.id); + const doc3 = await db.maps.find(cr3.id); + + assert(doc1 !== null); + assert(doc2 !== null); + assert(doc3 !== null); + assertEquals(doc1.value, [...val1, ...u1]); + assertEquals(doc2.value, new Set([...setEntries, ...uSetEntries])); + assertEquals(doc3.value, new Map([...mapEntries, ...uMapEntries])); + }); }, - ) + ); await t.step( "Should update documents of primitive and built-in object types using replace", @@ -172,65 +172,65 @@ Deno.test("collection - update", async (t) => { numbers: collection(model()), strings: collection(model()), dates: collection(model()), - }) - - const cr1 = await db.numbers.add(10) - const cr2 = await db.strings.add("10") - const cr3 = await db.dates.add(new Date("2000-01-01")) - assert(cr1.ok && cr2.ok && cr3.ok) - - const val1 = 20 - const val2 = "20" - const val3 = new Date("2016-01-01") - - const updateCr1 = await db.numbers.update(cr1.id, val1) - const updateCr2 = await db.strings.update(cr2.id, val2) - const updateCr3 = await db.dates.update(cr3.id, val3) - assert(updateCr1.ok && updateCr2.ok && updateCr3.ok) - assert(updateCr1.id === cr1.id) - assert(updateCr1.versionstamp !== cr1.versionstamp) - assert(updateCr2.id === cr2.id) - assert(updateCr2.versionstamp !== cr2.versionstamp) - assert(updateCr3.id === cr3.id) - assert(updateCr3.versionstamp !== cr3.versionstamp) - - const doc1 = await db.numbers.find(cr1.id) - const doc2 = await db.strings.find(cr2.id) - const doc3 = await db.dates.find(cr3.id) - assert(doc1 !== null && doc2 !== null && doc3 !== null) - - assert(doc1.value === val1) - assert(doc2.value === val2) - assert(doc3.value.valueOf() === val3.valueOf()) - }) + }); + + const cr1 = await db.numbers.add(10); + const cr2 = await db.strings.add("10"); + const cr3 = await db.dates.add(new Date("2000-01-01")); + assert(cr1.ok && cr2.ok && cr3.ok); + + const val1 = 20; + const val2 = "20"; + const val3 = new 
Date("2016-01-01"); + + const updateCr1 = await db.numbers.update(cr1.id, val1); + const updateCr2 = await db.strings.update(cr2.id, val2); + const updateCr3 = await db.dates.update(cr3.id, val3); + assert(updateCr1.ok && updateCr2.ok && updateCr3.ok); + assert(updateCr1.id === cr1.id); + assert(updateCr1.versionstamp !== cr1.versionstamp); + assert(updateCr2.id === cr2.id); + assert(updateCr2.versionstamp !== cr2.versionstamp); + assert(updateCr3.id === cr3.id); + assert(updateCr3.versionstamp !== cr3.versionstamp); + + const doc1 = await db.numbers.find(cr1.id); + const doc2 = await db.strings.find(cr2.id); + const doc3 = await db.dates.find(cr3.id); + assert(doc1 !== null && doc2 !== null && doc3 !== null); + + assert(doc1.value === val1); + assert(doc2.value === val2); + assert(doc3.value.valueOf() === val3.valueOf()); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.z_users.add(mockUser1) - assert(cr.ok) + const cr = await db.z_users.add(mockUser1); + assert(cr.ok); - await db.z_users.update(cr.id, mockUser2).catch(() => assertion = false) + await db.z_users.update(cr.id, mockUser2).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.z_users.add(mockUser1) - assert(cr.ok) + const cr = await db.z_users.add(mockUser1); + assert(cr.ok); await db.z_users.update(cr.id, mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/collection/updateMany.test.ts b/tests/collection/updateMany.test.ts index aa91b8a..f8a6d1f 100644 --- a/tests/collection/updateMany.test.ts +++ b/tests/collection/updateMany.test.ts @@ -1,20 +1,20 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateNumbers, generateUsers, useDb, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateNumbers, generateUsers, useDb, useKv } from "../utils.ts"; Deno.test("collection - updateMany", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const docs = await db.users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -22,40 +22,40 @@ Deno.test("collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.users.updateMany(updateData, { strategy: "merge-shallow", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await 
db.users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const docs = await db.users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -63,61 +63,61 @@ Deno.test("collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.users.updateMany(updateData, { strategy: "merge", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using replace", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.users.addMany(users); + assert(cr.ok); - const docs = await db.users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const { result } = await db.users.updateMany(mockUser1, { strategy: "replace", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.users.forEach((doc) => { - assert(doc.value.address.country === mockUser1.address.country) - assert(doc.value.address.city === mockUser1.address.city) - assert(doc.value.address.houseNr === mockUser1.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) - }) + assert(doc.value.address.country === mockUser1.address.country); + assert(doc.value.address.city === mockUser1.address.city); + assert(doc.value.address.houseNr === mockUser1.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); + }); }, - ) + ); await t.step( "Should update 
1000 documents of type Array, Set and Map using merge",
@@ -127,71 +127,71 @@ Deno.test("collection - updateMany", async (t) => { arrays: collection(model<number[]>()), sets: collection(model<Set<number>>()), maps: collection(model<Map<string, number>>()), - }) + });
- const val1 = [1, 2, 4] - const setEntries = [1, 2, 4] - const val2 = new Set(setEntries) - const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const - const val3 = new Map(mapEntries) + const val1 = [1, 2, 4]; + const setEntries = [1, 2, 4]; + const val2 = new Set(setEntries); + const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const; + const val3 = new Map(mapEntries);
- const vals1: number[][] = [] - const vals2: Set<number>[] = [] - const vals3: Map<string, number>[] = [] + const vals1: number[][] = []; + const vals2: Set<number>[] = []; + const vals3: Map<string, number>[] = []; for (let i = 0; i < 1_000; i++) { - vals1.push(val1) - vals2.push(val2) - vals3.push(val3) + vals1.push(val1); + vals2.push(val2); + vals3.push(val3); }
- const cr1 = await db.arrays.addMany(vals1) - const cr2 = await db.sets.addMany(vals2) - const cr3 = await db.maps.addMany(vals3) + const cr1 = await db.arrays.addMany(vals1); + const cr2 = await db.sets.addMany(vals2); + const cr3 = await db.maps.addMany(vals3);
- assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok);
- const u1 = [1, 3, 5] - const uSetEntries = [1, 3, 5] - const u2 = new Set(uSetEntries) - const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const - const u3 = new Map(uMapEntries) + const u1 = [1, 3, 5]; + const uSetEntries = [1, 3, 5]; + const u2 = new Set(uSetEntries); + const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const; + const u3 = new Map(uMapEntries);
const { result: updateCrs1 } = await db.arrays.updateMany(u1, { strategy: "merge", - }) + }); const { result: updateCrs2 } = await db.sets.updateMany(u2, { strategy: "merge", - }) + }); const { result: updateCrs3 } = await db.maps.updateMany(u3, { strategy: "merge", - }) + });
- assert(updateCrs1.every((cr) => cr.ok)) - assert(updateCrs2.every((cr) => cr.ok)) - assert(updateCrs3.every((cr) => cr.ok)) + assert(updateCrs1.every((cr) => cr.ok)); + assert(updateCrs2.every((cr) => cr.ok)); + assert(updateCrs3.every((cr) => cr.ok));
- const { result: docs1 } = await db.arrays.getMany() - const { result: docs2 } = await db.sets.getMany() - const { result: docs3 } = await db.maps.getMany() + const { result: docs1 } = await db.arrays.getMany(); + const { result: docs2 } = await db.sets.getMany(); + const { result: docs3 } = await db.maps.getMany();
- assert(docs1.length === vals1.length) - assert(docs2.length === vals2.length) - assert(docs3.length === vals3.length) + assert(docs1.length === vals1.length); + assert(docs2.length === vals2.length); + assert(docs3.length === vals3.length);
- docs1.forEach((doc) => assertEquals(doc.value, [...val1, ...u1])) + docs1.forEach((doc) => assertEquals(doc.value, [...val1, ...u1])); docs2.forEach((doc) => assertEquals(doc.value, new Set([...setEntries, ...uSetEntries])) - ) + ); docs3.forEach((doc) => assertEquals(doc.value, new Map([...mapEntries, ...uMapEntries])) - ) - }) + ); + }); }, - ) + );
await t.step( "Should update 1000 documents of types primitive and built-in object using replace",
@@ -201,98 +201,100 @@ Deno.test("collection - updateMany", async (t) => { numbers: collection(model<number>()), strings: collection(model<string>()), dates: collection(model<Date>()), - }) + });
- const numbers = generateNumbers(1_000) + const numbers = generateNumbers(1_000);
- const strings: string[] = [] + const strings: string[] = []; for (let i = 
0; i < 1_000; i++) { - strings.push(Math.random().toString()) + strings.push(Math.random().toString()); } - const dates: Date[] = [] + const dates: Date[] = []; for (let i = 0; i < 1_000; i++) { - dates.push(new Date("2000-01-01")) + dates.push(new Date("2000-01-01")); } - const cr1 = await db.numbers.addMany(numbers) - const cr2 = await db.strings.addMany(strings) - const cr3 = await db.dates.addMany(dates) + const cr1 = await db.numbers.addMany(numbers); + const cr2 = await db.strings.addMany(strings); + const cr3 = await db.dates.addMany(dates); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const docs1 = await db.numbers.getMany() - const docs2 = await db.strings.getMany() - const docs3 = await db.dates.getMany() + const docs1 = await db.numbers.getMany(); + const docs2 = await db.strings.getMany(); + const docs3 = await db.dates.getMany(); - const ids1 = docs1.result.map((doc) => doc.id) - const ids2 = docs2.result.map((doc) => doc.id) - const ids3 = docs3.result.map((doc) => doc.id) + const ids1 = docs1.result.map((doc) => doc.id); + const ids2 = docs2.result.map((doc) => doc.id); + const ids3 = docs3.result.map((doc) => doc.id); - const versionstamps1 = docs1.result.map((doc) => doc.versionstamp) - const versionstamps2 = docs2.result.map((doc) => doc.versionstamp) - const versionstamps3 = docs3.result.map((doc) => doc.versionstamp) + const versionstamps1 = docs1.result.map((doc) => doc.versionstamp); + const versionstamps2 = docs2.result.map((doc) => doc.versionstamp); + const versionstamps3 = docs3.result.map((doc) => doc.versionstamp); - const val1 = 20 - const val2 = "updated" - const val3 = new Date("2016-01-01") + const val1 = 20; + const val2 = "updated"; + const val3 = new Date("2016-01-01"); - const updateQuery1 = await db.numbers.updateMany(val1) - const updateQuery2 = await db.strings.updateMany(val2) - const updateQuery3 = await db.dates.updateMany(val3) + const updateQuery1 = await db.numbers.updateMany(val1); + const updateQuery2 = await db.strings.updateMany(val2); + const updateQuery3 = await db.dates.updateMany(val3); - assert(updateQuery1.result.every((cr) => cr.ok)) - assert(updateQuery2.result.every((cr) => cr.ok)) - assert(updateQuery3.result.every((cr) => cr.ok)) + assert(updateQuery1.result.every((cr) => cr.ok)); + assert(updateQuery2.result.every((cr) => cr.ok)); + assert(updateQuery3.result.every((cr) => cr.ok)); await db.numbers.forEach((doc) => { - assertEquals(doc.value, val1) - assert(ids1.includes(doc.id)) - assert(!versionstamps1.includes(doc.versionstamp)) - }) + assertEquals(doc.value, val1); + assert(ids1.includes(doc.id)); + assert(!versionstamps1.includes(doc.versionstamp)); + }); await db.strings.forEach((doc) => { - assertEquals(doc.value, val2) - assert(ids2.includes(doc.id)) - assert(!versionstamps2.includes(doc.versionstamp)) - }) + assertEquals(doc.value, val2); + assert(ids2.includes(doc.id)); + assert(!versionstamps2.includes(doc.versionstamp)); + }); await db.dates.forEach((doc) => { - assertEquals(doc.value, val3) - assert(ids3.includes(doc.id)) - assert(!versionstamps3.includes(doc.versionstamp)) - }) - }) + assertEquals(doc.value, val3); + assert(ids3.includes(doc.id)); + assert(!versionstamps3.includes(doc.versionstamp)); + }); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await 
db.z_users.addMany(users) - assert(cr.ok) + const cr = await db.z_users.addMany(users); + assert(cr.ok); - await db.z_users.updateMany(mockUser1).catch(() => assertion = false) + await db.z_users.updateMany(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.z_users.addMany(users) - assert(cr.ok) + const cr = await db.z_users.addMany(users); + assert(cr.ok); - await db.z_users.updateMany(mockUserInvalid).catch(() => assertion = true) + await db.z_users.updateMany(mockUserInvalid).catch(() => + assertion = true + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/collection/updateOne.test.ts b/tests/collection/updateOne.test.ts index b450d54..137b3be 100644 --- a/tests/collection/updateOne.test.ts +++ b/tests/collection/updateOne.test.ts @@ -1,25 +1,25 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals, assertNotEquals } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals, assertNotEquals } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; import { generateNumbers, generateUsers, sleep, useDb, useKv, -} from "../utils.ts" +} from "../utils.ts"; Deno.test("collection - updateOne", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -27,43 +27,43 @@ Deno.test("collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.users.updateOne(updateData, { strategy: "merge-shallow", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.users.find(cr1.id) - const doc2 = await db.users.find(cr2.id) + const doc1 = await db.users.find(cr1.id); + const doc2 = await db.users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === 
mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -71,74 +71,74 @@ Deno.test("collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.users.updateOne(updateData, { strategy: "merge", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.users.find(cr1.id) - const doc2 = await db.users.find(cr2.id) + const doc1 = await db.users.find(cr1.id); + const doc2 = await db.users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - await sleep(10) - const cr2 = await db.users.add(mockUser2) + const cr1 = await db.users.add(mockUser1); + await sleep(10); + const cr2 = await db.users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.users.updateOne(updateData, { strategy: "replace", - }) - - assert(updateCr.ok) - - const doc1 = await db.users.find(cr1.id) - const doc2 = await db.users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + }); + + assert(updateCr.ok); + + 
const doc1 = await db.users.find(cr1.id); + const doc2 = await db.users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + );
await t.step( "Should update only one document of type Array, Set and Map using merge",
@@ -148,71 +148,71 @@ Deno.test("collection - updateOne", async (t) => { arrays: collection(model<number[]>()), sets: collection(model<Set<number>>()), maps: collection(model<Map<string, number>>()), - }) + });
- const val1 = [1, 2, 4] - const setEntries = [1, 2, 4] - const val2 = new Set(setEntries) - const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const - const val3 = new Map(mapEntries) + const val1 = [1, 2, 4]; + const setEntries = [1, 2, 4]; + const val2 = new Set(setEntries); + const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const; + const val3 = new Map(mapEntries);
- const vals1: number[][] = [] - const vals2: Set<number>[] = [] - const vals3: Map<string, number>[] = [] + const vals1: number[][] = []; + const vals2: Set<number>[] = []; + const vals3: Map<string, number>[] = []; for (let i = 0; i < 1_000; i++) { - vals1.push(val1) - vals2.push(val2) - vals3.push(val3) + vals1.push(val1); + vals2.push(val2); + vals3.push(val3); }
- const cr1 = await db.arrays.addMany(vals1) - const cr2 = await db.sets.addMany(vals2) - const cr3 = await db.maps.addMany(vals3) + const cr1 = await db.arrays.addMany(vals1); + const cr2 = await db.sets.addMany(vals2); + const cr3 = await db.maps.addMany(vals3);
- assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok);
- const u1 = [1, 3, 5] - const uSetEntries = [1, 3, 5] - const u2 = new Set(uSetEntries) - const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const - const u3 = new Map(uMapEntries) + const u1 = [1, 3, 5]; + const uSetEntries = [1, 3, 5]; + const u2 = new Set(uSetEntries); + const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const; + const u3 = new Map(uMapEntries);
const updateCr1 = await db.arrays.updateOne(u1, { strategy: "merge", - }) + }); const updateCr2 = await db.sets.updateOne(u2, { strategy: "merge", - }) + }); const updateCr3 = await db.maps.updateOne(u3, { strategy: "merge", - }) + });
- assert(updateCr1.ok) - assert(updateCr2.ok) - assert(updateCr3.ok) + assert(updateCr1.ok); + assert(updateCr2.ok); + assert(updateCr3.ok);
- const { result: [d1, ...docs1] } = await db.arrays.getMany() - const { result: [d2, ...docs2] } = await db.sets.getMany() - const { result: [d3, ...docs3] } = await db.maps.getMany() + const { result: [d1, ...docs1] } = await db.arrays.getMany(); + const { result: [d2, ...docs2] } = await db.sets.getMany(); + const { result: [d3, ...docs3] } = await db.maps.getMany();
- assertEquals(d1.value, [...val1, ...u1]) - assertEquals(d2.value, new Set([...setEntries, ...uSetEntries])) - assertEquals(d3.value, new Map([...mapEntries, ...uMapEntries])) + assertEquals(d1.value, [...val1, ...u1]); + assertEquals(d2.value, 
new Set([...setEntries, ...uSetEntries])); + assertEquals(d3.value, new Map([...mapEntries, ...uMapEntries])); - docs1.forEach((doc) => assertNotEquals(doc.value, [...val1, ...u1])) + docs1.forEach((doc) => assertNotEquals(doc.value, [...val1, ...u1])); docs2.forEach((doc) => assertNotEquals(doc.value, new Set([...setEntries, ...uSetEntries])) - ) + ); docs3.forEach((doc) => assertNotEquals(doc.value, new Map([...mapEntries, ...uMapEntries])) - ) - }) + ); + }); }, - ) + ); await t.step( "Should update only one document of types primitive and built-in object using replace", @@ -222,81 +222,81 @@ Deno.test("collection - updateOne", async (t) => { numbers: collection(model()), strings: collection(model()), dates: collection(model()), - }) + }); - const numbers = generateNumbers(1_000) + const numbers = generateNumbers(1_000); - const strings: string[] = [] + const strings: string[] = []; for (let i = 0; i < 1_000; i++) { - strings.push(Math.random().toString()) + strings.push(Math.random().toString()); } - const dates: Date[] = [] + const dates: Date[] = []; for (let i = 0; i < 1_000; i++) { - dates.push(new Date("2000-01-01")) + dates.push(new Date("2000-01-01")); } - const cr1 = await db.numbers.addMany(numbers) - const cr2 = await db.strings.addMany(strings) - const cr3 = await db.dates.addMany(dates) + const cr1 = await db.numbers.addMany(numbers); + const cr2 = await db.strings.addMany(strings); + const cr3 = await db.dates.addMany(dates); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const val1 = -100 - const val2 = "updated" - const val3 = new Date("2016-01-01") + const val1 = -100; + const val2 = "updated"; + const val3 = new Date("2016-01-01"); - const updateCr1 = await db.numbers.updateOne(val1) - const updateCr2 = await db.strings.updateOne(val2) - const updateCr3 = await db.dates.updateOne(val3) + const updateCr1 = await db.numbers.updateOne(val1); + const updateCr2 = await db.strings.updateOne(val2); + const updateCr3 = await db.dates.updateOne(val3); - assert(updateCr1.ok) - assert(updateCr2.ok) - assert(updateCr3.ok) + assert(updateCr1.ok); + assert(updateCr2.ok); + assert(updateCr3.ok); - const { result: [d1, ...ds1] } = await db.numbers.getMany() - const { result: [d2, ...ds2] } = await db.strings.getMany() - const { result: [d3, ...ds3] } = await db.dates.getMany() + const { result: [d1, ...ds1] } = await db.numbers.getMany(); + const { result: [d2, ...ds2] } = await db.strings.getMany(); + const { result: [d3, ...ds3] } = await db.dates.getMany(); - assertEquals(d1.value, val1) - ds1.forEach((doc) => assertNotEquals(doc.value, val1)) + assertEquals(d1.value, val1); + ds1.forEach((doc) => assertNotEquals(doc.value, val1)); - assertEquals(d2.value, val2) - ds2.forEach((doc) => assertNotEquals(doc.value, val2)) + assertEquals(d2.value, val2); + ds2.forEach((doc) => assertNotEquals(doc.value, val2)); - assertEquals(d3.value, val3) - ds3.forEach((doc) => assertNotEquals(doc.value, val3)) - }) + assertEquals(d3.value, val3); + ds3.forEach((doc) => assertNotEquals(doc.value, val3)); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.z_users.addMany(users) - assert(cr.ok) + const cr = await db.z_users.addMany(users); + assert(cr.ok); - await db.z_users.updateOne(mockUser1).catch(() => assertion = false) + await 
db.z_users.updateOne(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.z_users.addMany(users) - assert(cr.ok) + const cr = await db.z_users.addMany(users); + assert(cr.ok); - await db.z_users.updateOne(mockUserInvalid).catch(() => assertion = true) + await db.z_users.updateOne(mockUserInvalid).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/collection/upsert.test.ts b/tests/collection/upsert.test.ts index 70166cb..3e23ae1 100644 --- a/tests/collection/upsert.test.ts +++ b/tests/collection/upsert.test.ts @@ -1,35 +1,35 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("collection - upsert", async (t) => { await t.step("Should set new doucment entry by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; const cr = await db.users.upsert({ id: id, set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); + }); await t.step( "Should update existing document entry by id using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -37,7 +37,7 @@ Deno.test("collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.users.upsert({ id: id, @@ -45,30 +45,30 @@ Deno.test("collection - upsert", async (t) => { update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by id using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await 
db.users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -76,7 +76,7 @@ Deno.test("collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.users.upsert({ id: id, @@ -84,30 +84,30 @@ Deno.test("collection - upsert", async (t) => { update: updateData, }, { strategy: "merge", - }) - - assert(cr2.ok) - - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by id using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.users.set(id, mockUser1); + assert(cr1.ok); const cr2 = await db.users.upsert({ id: id, @@ -115,19 +115,19 @@ Deno.test("collection - upsert", async (t) => { update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/collection/watch.test.ts b/tests/collection/watch.test.ts index 42a84d0..88b4dd9 100644 --- a/tests/collection/watch.test.ts +++ b/tests/collection/watch.test.ts @@ -1,70 +1,70 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test("collection - watch", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id = "id" - const docs: (Document | null)[] = [] + const id = "id"; + const docs: (Document | 
null)[] = []; const { promise, cancel } = db.users.watch(id, (doc) => { - docs.push(doc) - }) + docs.push(doc); + }); - await db.users.set(id, mockUser1) - await sleep(500) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(500) - await db.users.update(id, mockUser3) - await sleep(500) - await db.users.delete(id) - await sleep(500) + await db.users.set(id, mockUser1); + await sleep(500); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(500); + await db.users.update(id, mockUser3); + await sleep(500); + await db.users.delete(id); + await sleep(500); - assert(docs.some((doc) => doc?.value.username === mockUser1.username)) - assert(docs.some((doc) => doc?.value.username === mockUser2.username)) - assert(docs.some((doc) => doc?.value.username === mockUser3.username)) - assert(docs.some((doc) => doc === null)) + assert(docs.some((doc) => doc?.value.username === mockUser1.username)); + assert(docs.some((doc) => doc?.value.username === mockUser2.username)); + assert(docs.some((doc) => doc?.value.username === mockUser3.username)); + assert(docs.some((doc) => doc === null)); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - let count = 0 - let username = "" - let lastDoc: any + const id1 = "id1"; + const id2 = "id2"; + let count = 0; + let username = ""; + let lastDoc: any; const { promise, cancel } = db.users.watch(id1, (doc) => { - count++ - lastDoc = doc + count++; + lastDoc = doc; if (doc?.value.username) { - username = doc.value.username + username = doc.value.username; } - }) + }); - await db.users.set(id2, mockUser1) - await sleep(500) - await db.users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - await db.users.update(id2, mockUser3) - await sleep(500) - await db.users.delete(id2) - await sleep(500) + await db.users.set(id2, mockUser1); + await sleep(500); + await db.users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + await db.users.update(id2, mockUser3); + await sleep(500); + await db.users.delete(id2); + await sleep(500); // Account for initial invocation - assert(count === 1) - assert(username === "") - assert(lastDoc === null) + assert(count === 1); + assert(username === ""); + assert(lastDoc === null); - await cancel() - await promise - }) - }) -}) + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/collection/watchMany.test.ts b/tests/collection/watchMany.test.ts index 8ac88b3..0c589a6 100644 --- a/tests/collection/watchMany.test.ts +++ b/tests/collection/watchMany.test.ts @@ -1,117 +1,117 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test("collection - watchMany", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - const id3 = "id3" - const snapshots: (Document | null)[][] = [] + const id1 = "id1"; + const id2 = "id2"; + const id3 = "id3"; + const snapshots: (Document | 
null)[][] = []; - await db.users.set(id3, mockUser1) + await db.users.set(id3, mockUser1); const { promise, cancel } = db.users.watchMany( [id1, id2, id3], (docs) => { - snapshots.push(docs) + snapshots.push(docs); }, - ) + ); - const cr1 = await db.users.set(id1, mockUser1) - await sleep(500) - await db.users.delete(id1) - await sleep(500) - const cr2 = await db.users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - const cr3 = await db.users.update(id3, mockUser3) - await sleep(500) + const cr1 = await db.users.set(id1, mockUser1); + await sleep(500); + await db.users.delete(id1); + await sleep(500); + const cr2 = await db.users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + const cr3 = await db.users.update(id3, mockUser3); + await sleep(500); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1?.value.username === mockUser1.username && doc2 === null && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2 === null && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? 
null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser3.username - })) + doc3?.value.username === mockUser3.username; + })); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - const id3 = "id3" - const id4 = "id4" - let count = 0 - let lastDocs: any[] = [] + const id1 = "id1"; + const id2 = "id2"; + const id3 = "id3"; + const id4 = "id4"; + let count = 0; + let lastDocs: any[] = []; const { promise, cancel } = db.users.watchMany( [id1, id2, id3], (docs) => { - count++ - lastDocs = docs + count++; + lastDocs = docs; }, - ) - - await db.users.set(id4, mockUser1) - await sleep(500) - await db.users.set(id4, mockUser2, { overwrite: true }) - await sleep(500) - await db.users.update(id4, mockUser3) - await sleep(500) - await db.users.delete(id4) - await sleep(500) - - assert(count === 1) - assert(lastDocs[0] === null) - assert(lastDocs[1] === null) - assert(lastDocs[2] === null) - - await cancel() - await promise - }) - }) -}) + ); + + await db.users.set(id4, mockUser1); + await sleep(500); + await db.users.set(id4, mockUser2, { overwrite: true }); + await sleep(500); + await db.users.update(id4, mockUser3); + await sleep(500); + await db.users.delete(id4); + await sleep(500); + + assert(count === 1); + assert(lastDocs[0] === null); + assert(lastDocs[1] === null); + assert(lastDocs[2] === null); + + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/db/atomic.test.ts b/tests/db/atomic.test.ts index 6120c33..afd372b 100644 --- a/tests/db/atomic.test.ts +++ b/tests/db/atomic.test.ts @@ -4,11 +4,11 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { sleep, useDb, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import { sleep, useDb, useKv } from "../utils.ts"; Deno.test("db - atomic", async (t) => { await t.step("Should add documents to collection", async () => { @@ -17,186 +17,186 @@ Deno.test("db - atomic", async (t) => { .atomic((schema) => schema.users) .add(mockUser1) .add(mockUser2) - .commit() + .commit(); - assert(cr.ok) + assert(cr.ok); - const count = await db.users.count() - assert(count === 2) - }) - }) + const count = await db.users.count(); + assert(count === 2); + }); + }); await t.step( "Should only set first document with colliding ids", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; const cr = await db .atomic((schema) => schema.users) .set(id, mockUser1) .set(id, mockUser2) - .commit() + .commit(); - assert(cr.ok) + assert(cr.ok); - const count = await db.users.count() - assert(count === 1) - }) + const count = await db.users.count(); + assert(count === 1); + }); }, - ) + ); await t.step("Should delete document", async () => { await useDb(async (db) => { - const cr1 = await db.users.add(mockUser1) - assert(cr1.ok) + const cr1 = await db.users.add(mockUser1); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.users) .delete(cr1.id) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const count = await 
db.users.count() - const doc = await db.users.find(cr1.id) - assert(count === 0) - assert(doc === null) - }) - }) + const count = await db.users.count(); + const doc = await db.users.find(cr1.id); + assert(count === 0); + assert(doc === null); + }); + }); await t.step("Should perform sum operation", async () => { await useDb(async (db) => { - const initial = 100n - const additional = 10n + const initial = 100n; + const additional = 10n; - const cr1 = await db.u64s.add(new Deno.KvU64(initial)) - assert(cr1.ok) + const cr1 = await db.u64s.add(new Deno.KvU64(initial)); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.u64s) .sum(cr1.id, additional) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const doc = await db.u64s.find(cr1.id) - assert(doc?.value.value === initial + additional) - }) - }) + const doc = await db.u64s.find(cr1.id); + assert(doc?.value.value === initial + additional); + }); + }); await t.step( "Should perform min operation and set document value to the given value", async () => { await useDb(async (db) => { - const initial = 100n - const min = 10n + const initial = 100n; + const min = 10n; - const cr1 = await db.u64s.add(new Deno.KvU64(initial)) - assert(cr1.ok) + const cr1 = await db.u64s.add(new Deno.KvU64(initial)); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.u64s) .min(cr1.id, min) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const doc = await db.u64s.find(cr1.id) - assert(doc?.value.value === min) - }) + const doc = await db.u64s.find(cr1.id); + assert(doc?.value.value === min); + }); }, - ) + ); await t.step( "Should perform min operation and set document value to the existing value", async () => { await useDb(async (db) => { - const initial = 100n - const min = 200n + const initial = 100n; + const min = 200n; - const cr1 = await db.u64s.add(new Deno.KvU64(initial)) - assert(cr1.ok) + const cr1 = await db.u64s.add(new Deno.KvU64(initial)); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.u64s) .min(cr1.id, min) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const doc = await db.u64s.find(cr1.id) - assert(doc?.value.value === initial) - }) + const doc = await db.u64s.find(cr1.id); + assert(doc?.value.value === initial); + }); }, - ) + ); await t.step( "Should perform max operation and set document value to the given value", async () => { await useDb(async (db) => { - const initial = 100n - const max = 200n + const initial = 100n; + const max = 200n; - const cr1 = await db.u64s.add(new Deno.KvU64(initial)) - assert(cr1.ok) + const cr1 = await db.u64s.add(new Deno.KvU64(initial)); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.u64s) .max(cr1.id, max) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const doc = await db.u64s.find(cr1.id) - assert(doc?.value.value === max) - }) + const doc = await db.u64s.find(cr1.id); + assert(doc?.value.value === max); + }); }, - ) + ); await t.step( "Should perform max operation and set document value to the existing value", async () => { await useDb(async (db) => { - const initial = 100n - const max = 10n + const initial = 100n; + const max = 10n; - const cr1 = await db.u64s.add(new Deno.KvU64(initial)) - assert(cr1.ok) + const cr1 = await db.u64s.add(new Deno.KvU64(initial)); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.u64s) .max(cr1.id, max) - .commit() + .commit(); - assert(cr2.ok) + assert(cr2.ok); - const doc = await db.u64s.find(cr1.id) - assert(doc?.value.value === initial) - 
}) + const doc = await db.u64s.find(cr1.id); + assert(doc?.value.value === initial); + }); }, - ) + ); await t.step("Should perform mutation operations", async () => { await useDb(async (db) => { - const initial = new Deno.KvU64(100n) - const set = new Deno.KvU64(200n) - const add = new Deno.KvU64(300n) - const id = "id" - const sum = new Deno.KvU64(100n) - const min1 = new Deno.KvU64(10n) - const min2 = new Deno.KvU64(200n) - const max1 = new Deno.KvU64(200n) - const max2 = new Deno.KvU64(10n) - - const cr1 = await db.u64s.add(initial) - const cr2 = await db.u64s.add(initial) - const cr3 = await db.u64s.add(initial) - const cr4 = await db.u64s.add(initial) - const cr5 = await db.u64s.add(initial) - const cr6 = await db.u64s.add(initial) - - assert(cr1.ok && cr2.ok && cr3.ok && cr4.ok && cr5.ok && cr6.ok) + const initial = new Deno.KvU64(100n); + const set = new Deno.KvU64(200n); + const add = new Deno.KvU64(300n); + const id = "id"; + const sum = new Deno.KvU64(100n); + const min1 = new Deno.KvU64(10n); + const min2 = new Deno.KvU64(200n); + const max1 = new Deno.KvU64(200n); + const max2 = new Deno.KvU64(10n); + + const cr1 = await db.u64s.add(initial); + const cr2 = await db.u64s.add(initial); + const cr3 = await db.u64s.add(initial); + const cr4 = await db.u64s.add(initial); + const cr5 = await db.u64s.add(initial); + const cr6 = await db.u64s.add(initial); + + assert(cr1.ok && cr2.ok && cr3.ok && cr4.ok && cr5.ok && cr6.ok); await db .atomic((schema) => schema.u64s) @@ -241,77 +241,77 @@ Deno.test("db - atomic", async (t) => { type: "delete", }, ) - .commit() + .commit(); - const docSet = await db.u64s.find(id) + const docSet = await db.u64s.find(id); const { result: [docAdd] } = await db.u64s.getMany({ filter: (d) => d.value.value === 300n, - }) - - const doc1 = await db.u64s.find(cr1.id) - const doc2 = await db.u64s.find(cr2.id) - const doc3 = await db.u64s.find(cr3.id) - const doc4 = await db.u64s.find(cr4.id) - const doc5 = await db.u64s.find(cr5.id) - const doc6 = await db.u64s.find(cr6.id) - - assert(docSet?.value.value === set.value) - assert(docAdd?.value.value === add.value) - assert(doc1?.value.value === initial.value + sum.value) - assert(doc2?.value.value === min1.value) - assert(doc3?.value.value === initial.value) - assert(doc4?.value.value === max1.value) - assert(doc5?.value.value === initial.value) - assert(doc6 === null) - }) - }) + }); + + const doc1 = await db.u64s.find(cr1.id); + const doc2 = await db.u64s.find(cr2.id); + const doc3 = await db.u64s.find(cr3.id); + const doc4 = await db.u64s.find(cr4.id); + const doc5 = await db.u64s.find(cr5.id); + const doc6 = await db.u64s.find(cr6.id); + + assert(docSet?.value.value === set.value); + assert(docAdd?.value.value === add.value); + assert(doc1?.value.value === initial.value + sum.value); + assert(doc2?.value.value === min1.value); + assert(doc3?.value.value === initial.value); + assert(doc4?.value.value === max1.value); + assert(doc5?.value.value === initial.value); + assert(doc6 === null); + }); + }); await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" + const data = "data"; + const undeliveredId = "undelivered"; const db = kvdex(kv, { numbers: collection(model()), - }) + }); - const handlerId = createHandlerId(db.numbers._keys.base, undefined) + const handlerId = createHandlerId(db.numbers._keys.base, undefined); - let assertion = false + let assertion = false; const listener = kv.listenQueue((msg) => { - 
const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + }); await db .atomic((schema) => schema.numbers) .enqueue("data", { idsIfUndelivered: [undeliveredId], }) - .commit() + .commit(); - await sleep(100) + await sleep(100); - const undelivered = await db.numbers.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should successfully parse and add documents", async () => { await useDb(async (db) => { const cr1 = await db .atomic((schema) => schema.z_users) .add(mockUser1) - .commit() + .commit(); const cr2 = await db .atomic((schema) => schema.users) .set("id2", mockUser1) - .commit() + .commit(); const cr3 = await db .atomic((schema) => schema.users) @@ -320,36 +320,36 @@ Deno.test("db - atomic", async (t) => { id: "id3", value: mockUser1, }) - .commit() + .commit(); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) - }) - }) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); + }); + }); await t.step("Should fail to parse and adding documents", async () => { await useDb(async (db) => { - let assertion1 = false - let assertion2 = false - let assertion3 = false + let assertion1 = false; + let assertion2 = false; + let assertion3 = false; try { await db .atomic((schema) => schema.z_users) .add(mockUserInvalid) - .commit() + .commit(); } catch (_) { - assertion1 = true + assertion1 = true; } try { await db .atomic((schema) => schema.z_users) .set("id2", mockUserInvalid) - .commit() + .commit(); } catch (_) { - assertion2 = true + assertion2 = true; } try { @@ -360,52 +360,52 @@ Deno.test("db - atomic", async (t) => { id: "id3", value: mockUserInvalid, }) - .commit() + .commit(); } catch (_) { - assertion3 = true + assertion3 = true; } - assert(assertion1) - assert(assertion2) - assert(assertion3) - }) - }) + assert(assertion1); + assert(assertion2); + assert(assertion3); + }); + }); await t.step("Should retain history in correct order", async () => { await useKv(async (kv) => { const db = kvdex(kv, { numbers: collection(model(), { history: true }), - }) + }); - const id = "id" + const id = "id"; await db .atomic((s) => s.numbers) .add(100) .set(id, 200) - .commit() + .commit(); - await sleep(10) + await sleep(10); await db .atomic((s) => s.numbers) .delete(id) - .commit() + .commit(); const { result: [doc] } = await db.numbers.getMany({ filter: (d) => d.value === 100, - }) + }); - const { result: [h] } = await db.numbers.findHistory(doc.id) - assert(h.type === "write") - assert(h.value === 100) + const { result: [h] } = await db.numbers.findHistory(doc.id); + assert(h.type === "write"); + assert(h.value === 100); - const { result: [h1, h2] } = await db.numbers.findHistory(id) + const { result: [h1, h2] } = await db.numbers.findHistory(id); - assert(h1.type === "write") - assert(h1.value === 200) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - }) - }) -}) + assert(h1.type === "write"); + assert(h1.value === 200); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + }); + }); +}); diff --git a/tests/db/countAll.test.ts b/tests/db/countAll.test.ts index 
41fe0bf..b0f30ff 100644 --- a/tests/db/countAll.test.ts +++ b/tests/db/countAll.test.ts @@ -1,29 +1,29 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, generateUsers, useDb } from "../utils.ts"; Deno.test("db - countAll", async (t) => { await t.step( "Should correctly count all documents in the database", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const largeUsers = generateLargeUsers(10) + const users = generateUsers(10); + const largeUsers = generateLargeUsers(10); const u64s = [ new Deno.KvU64(0n), new Deno.KvU64(0n), - ] + ]; - const cr1 = await db.i_users.addMany(users) - const cr2 = await db.s_users.addMany(largeUsers) - const cr3 = await db.u64s.addMany(u64s) + const cr1 = await db.i_users.addMany(users); + const cr2 = await db.s_users.addMany(largeUsers); + const cr3 = await db.u64s.addMany(u64s); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const count = await db.countAll() - assert(count === users.length + largeUsers.length + u64s.length) - }) + const count = await db.countAll(); + assert(count === users.length + largeUsers.length + u64s.length); + }); }, - ) -}) + ); +}); diff --git a/tests/db/deleteAll.test.ts b/tests/db/deleteAll.test.ts index d2e2f38..f4fe71d 100644 --- a/tests/db/deleteAll.test.ts +++ b/tests/db/deleteAll.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, generateUsers, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateLargeUsers, generateUsers, useKv } from "../utils.ts"; Deno.test("db - deleteAll", async (t) => { await t.step( @@ -23,42 +23,42 @@ Deno.test("db - deleteAll", async (t) => { u64s: collection(model(), { history: true, }), - }) + }); - const users = generateUsers(100) - const largeUsers = generateLargeUsers(100) + const users = generateUsers(100); + const largeUsers = generateLargeUsers(100); const u64s = [ new Deno.KvU64(10n), new Deno.KvU64(20n), - ] + ]; - const cr1 = await db.i_users.addMany(users) - const cr2 = await db.s_users.addMany(largeUsers) - const cr3 = await db.u64s.addMany(u64s) + const cr1 = await db.i_users.addMany(users); + const cr2 = await db.s_users.addMany(largeUsers); + const cr3 = await db.u64s.addMany(u64s); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const { result: docs1 } = await db.i_users.getMany({ limit: 1 }) - const { result: docs2 } = await db.s_users.getMany({ limit: 1 }) - const { result: docs3 } = await db.u64s.getMany({ limit: 1 }) + const { result: docs1 } = await db.i_users.getMany({ limit: 1 }); + const { result: docs2 } = await db.s_users.getMany({ limit: 1 }); + const { result: docs3 } = await db.u64s.getMany({ limit: 1 }); - const count1 = await db.countAll() - assert(count1 === users.length + largeUsers.length + u64s.length) + const count1 = await db.countAll(); + assert(count1 === users.length + largeUsers.length + u64s.length); - await db.deleteAll() + await db.deleteAll(); - const count2 = await db.countAll() - const { result: h1 } = await db.i_users.findHistory(docs1[0].id) - const { result: h2 } = await 
db.s_users.findHistory(docs2[0].id) - const { result: h3 } = await db.u64s.findHistory(docs3[0].id) + const count2 = await db.countAll(); + const { result: h1 } = await db.i_users.findHistory(docs1[0].id); + const { result: h2 } = await db.s_users.findHistory(docs2[0].id); + const { result: h3 } = await db.u64s.findHistory(docs3[0].id); - assert(count2 === 0) - assert(h1.length > 0) - assert(h2.length > 0) - assert(h3.length > 0) - }) + assert(count2 === 0); + assert(h1.length > 0); + assert(h2.length > 0); + assert(h3.length > 0); + }); }, - ) -}) + ); +}); diff --git a/tests/db/enqueue.test.ts b/tests/db/enqueue.test.ts index fa46193..b76cc53 100644 --- a/tests/db/enqueue.test.ts +++ b/tests/db/enqueue.test.ts @@ -4,117 +4,117 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { KVDEX_KEY_PREFIX } from "../../src/constants.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { createResolver, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { KVDEX_KEY_PREFIX } from "../../src/constants.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { createResolver, useKv } from "../utils.ts"; Deno.test("db - enqueue", async (t) => { await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); const db = kvdex(kv, { numbers: collection(model()), - }) + }); - const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined) + const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined); - let assertion = false + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - sleeper.resolve() - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + sleeper.resolve(); + }); await db.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.numbers.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should enqueue message in correct topic", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" - const topic = "topic" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const topic = "topic"; + const sleeper = createResolver(); const db = kvdex(kv, { numbers: collection(model()), - }) + }); - let assertion1 = false - let assertion2 = true + let assertion1 = false; + let assertion2 = true; const l1 = db.listenQueue(() => { - assertion1 = true - sleeper.resolve() - }, { topic }) + assertion1 = true; + sleeper.resolve(); + }, { topic }); const l2 = db.listenQueue(() => { - assertion2 = false - }) + assertion2 = false; + }); await db.enqueue(data, { idsIfUndelivered: [undeliveredId], topic, - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await 
db.numbers.findUndelivered(undeliveredId) - assert(assertion1 || typeof undelivered?.value === typeof data) - assert(assertion2) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion1 || typeof undelivered?.value === typeof data); + assert(assertion2); - return async () => await Promise.all([l1, l2]) - }) - }) + return async () => await Promise.all([l1, l2]); + }); + }); await t.step("Should enqueue message with undefined data", async () => { await useKv(async (kv) => { - const data = undefined - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = undefined; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); const db = kvdex(kv, { numbers: collection(model()), - }) + }); - const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined) + const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined); - let assertion = false + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage + const qMsg = msg as QueueMessage; assertion = qMsg.__handlerId__ === handlerId && - qMsg.__data__ === data && qMsg.__is_undefined__ === true + qMsg.__data__ === data && qMsg.__is_undefined__ === true; - sleeper.resolve() - }) + sleeper.resolve(); + }); await db.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.numbers.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.numbers.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/db/indexable_atomic.test.ts b/tests/db/indexable_atomic.test.ts index 3d2a7c4..ccdf37f 100644 --- a/tests/db/indexable_atomic.test.ts +++ b/tests/db/indexable_atomic.test.ts @@ -1,6 +1,6 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("db - indexable_atomic", async (t) => { await t.step( @@ -10,104 +10,104 @@ Deno.test("db - indexable_atomic", async (t) => { await db .atomic((schema) => schema.i_users) .add(mockUser1) - .commit() + .commit(); - const count = await db.i_users.count() + const count = await db.i_users.count(); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count === 1) - assert(byPrimary?.value.username === mockUser1.username) - assert(bySecondary.result.at(0)?.value.username === mockUser1.username) - }) + assert(count === 1); + assert(byPrimary?.value.username === mockUser1.username); + assert(bySecondary.result.at(0)?.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should set document in collection with index entries", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; await db .atomic((schema) => schema.i_users) .set(id, mockUser1) - .commit() + .commit(); - const byId = await db.i_users.find(id) + const byId = await db.i_users.find(id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", 
mockUser1.age, - ) + ); - assert(byId?.id === id) - assert(byPrimary?.id === id) - assert(bySecondary.result.at(0)?.id === id) - }) + assert(byId?.id === id); + assert(byPrimary?.id === id); + assert(bySecondary.result.at(0)?.id === id); + }); }, - ) + ); await t.step( "Should not set document in collection with colliding primary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.add(mockUser1); + assert(cr1.ok); const cr2 = await db .atomic((schema) => schema.i_users) .add(mockUser1) - .commit() + .commit(); - assert(!cr2.ok) - }) + assert(!cr2.ok); + }); }, - ) + ); await t.step( "Should delete document and indices from collection", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.add(mockUser1); + assert(cr1.ok); await db .atomic((schema) => schema.i_users) .delete(cr1.id) - .commit() + .commit(); - const byId = await db.i_users.find(cr1.id) + const byId = await db.i_users.find(cr1.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(byId === null) - assert(byPrimary === null) - assert(bySecondary.result.length === 0) - }) + assert(byId === null); + assert(byPrimary === null); + assert(bySecondary.result.length === 0); + }); }, - ) + ); await t.step( "Should fail operation when trying to set and delete from the same indexbale collection", @@ -117,10 +117,10 @@ Deno.test("db - indexable_atomic", async (t) => { .atomic((schema) => schema.i_users) .add(mockUser1) .delete("id") - .commit() + .commit(); - assert(!cr.ok) - }) + assert(!cr.ok); + }); }, - ) -}) + ); +}); diff --git a/tests/db/kvdex.test.ts b/tests/db/kvdex.test.ts index 619145e..38906c4 100644 --- a/tests/db/kvdex.test.ts +++ b/tests/db/kvdex.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { KVDEX_KEY_PREFIX } from "../../src/constants.ts" -import { assert } from "../test.deps.ts" -import { useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { KVDEX_KEY_PREFIX } from "../../src/constants.ts"; +import { assert } from "../test.deps.ts"; +import { useKv } from "../utils.ts"; Deno.test("db - kvdex", async (t) => { await t.step( @@ -13,15 +13,15 @@ Deno.test("db - kvdex", async (t) => { nested: { numbers: collection(model()), }, - }) + }); - const key1 = JSON.stringify(db.numbers._keys.base) - const key2 = JSON.stringify(db.nested.numbers._keys.base) + const key1 = JSON.stringify(db.numbers._keys.base); + const key2 = JSON.stringify(db.nested.numbers._keys.base); - assert(key1 !== key2) - assert(key1 === `["${KVDEX_KEY_PREFIX}","numbers"]`) - assert(key2 === `["${KVDEX_KEY_PREFIX}","nested","numbers"]`) - }) + assert(key1 !== key2); + assert(key1 === `["${KVDEX_KEY_PREFIX}","numbers"]`); + assert(key2 === `["${KVDEX_KEY_PREFIX}","nested","numbers"]`); + }); }, - ) -}) + ); +}); diff --git a/tests/db/listenQueue.test.ts b/tests/db/listenQueue.test.ts index c9d904e..8fccb28 100644 --- a/tests/db/listenQueue.test.ts +++ b/tests/db/listenQueue.test.ts @@ -4,64 +4,64 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { KVDEX_KEY_PREFIX } from "../../src/constants.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { createResolver, sleep, useKv } 
from "../utils.ts" +} from "../../mod.ts"; +import { KVDEX_KEY_PREFIX } from "../../src/constants.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { createResolver, sleep, useKv } from "../utils.ts"; Deno.test("db - listenQueue", async (t) => { await t.step("Should receive message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const db = kvdex(kv, {}) - const sleeper = createResolver() + const data = "data"; + const db = kvdex(kv, {}); + const sleeper = createResolver(); - const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined) + const handlerId = createHandlerId([KVDEX_KEY_PREFIX], undefined); - let assertion = false + let assertion = false; const listener = db.listenQueue((msgData) => { - assertion = msgData === data - sleeper.resolve() - }) + assertion = msgData === data; + sleeper.resolve(); + }); const msg: QueueMessage = { __is_undefined__: false, __handlerId__: handlerId, __data__: data, - } + }; - await kv.enqueue(msg) + await kv.enqueue(msg); - await sleeper.promise - assert(assertion) + await sleeper.promise; + assert(assertion); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should not receive collection queue message", async () => { await useKv(async (kv) => { - const data = "data" + const data = "data"; const db = kvdex(kv, { numbers: collection(model()), - }) + }); - let assertion = true + let assertion = true; const listener = db.listenQueue(() => { - assertion = false - }) + assertion = false; + }); - await db.numbers.enqueue(data) + await db.numbers.enqueue(data); - await sleep(500) + await sleep(500); - assert(assertion) + assert(assertion); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/db/loop.test.ts b/tests/db/loop.test.ts index fffdbc0..20550cf 100644 --- a/tests/db/loop.test.ts +++ b/tests/db/loop.test.ts @@ -1,65 +1,65 @@ -import { kvdex } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { createResolver, useDb, useKv } from "../utils.ts" +import { kvdex } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { createResolver, useDb, useKv } from "../utils.ts"; Deno.test("db - loop", async (t) => { await t.step( "Should run both loops for 0 iterations, by terminating before the first task is called", async () => { await useDb(async (db) => { - const sleeper1 = createResolver() - const sleeper2 = createResolver() - let count1 = 0 - let count2 = 0 + const sleeper1 = createResolver(); + const sleeper2 = createResolver(); + let count1 = 0; + let count2 = 0; const listener1 = db.loop(() => count1++, { while: ({ first }) => !first, onExit: () => sleeper1.resolve(), - }) + }); const listener2 = db.loop(() => count2++, { while: (msg) => msg.count >= 1, onExit: () => sleeper2.resolve(), - }) + }); - await sleeper1.promise - await sleeper2.promise + await sleeper1.promise; + await sleeper2.promise; - assert(count1 === 0) - assert(count2 === 0) + assert(count1 === 0); + assert(count2 === 0); - return async () => await Promise.all([listener1, listener2]) - }) + return async () => await Promise.all([listener1, listener2]); + }); }, - ) + ); await t.step( "Should run loop for 10 iterations and carry accumulated result", async () => { await useKv(async (kv) => { - const db = kvdex(kv, {}) - const { resolve, promise } = createResolver() - let count = 0 + const db = kvdex(kv, {}); + const { resolve, 
promise } = createResolver(); + let count = 0; const listener = db.loop( ({ first, result }) => { - count++ - return first ? 1 : result + 1 + count++; + return first ? 1 : result + 1; }, { while: ({ first, result }) => first || result < 10, onExit: ({ result }) => { - assert(result === 10) - assert(count === 10) - resolve() + assert(result === 10); + assert(count === 10); + resolve(); }, }, - ) + ); - await promise + await promise; - return async () => await listener - }) + return async () => await listener; + }); }, - ) -}) + ); +}); diff --git a/tests/db/properties.test.ts b/tests/db/properties.test.ts index 83d8c38..d6a8d34 100644 --- a/tests/db/properties.test.ts +++ b/tests/db/properties.test.ts @@ -1,16 +1,16 @@ -import { openKv } from "npm:@deno/kv" -import { kvdex } from "../../mod.ts" +import { openKv } from "npm:@deno/kv"; +import { kvdex } from "../../mod.ts"; Deno.test("db - properties", async (t) => { await t.step("Should allow native Deno KV type", async () => { - const kv = await Deno.openKv() - kvdex(kv, {}) - kv.close() - }) + const kv = await Deno.openKv(); + kvdex(kv, {}); + kv.close(); + }); await t.step("Should allow NPM Deno KV type", async () => { - const kv = await openKv() - kvdex(kv, {}) - kv.close() - }) -}) + const kv = await openKv(); + kvdex(kv, {}); + kv.close(); + }); +}); diff --git a/tests/db/setInterval.test.ts b/tests/db/setInterval.test.ts index 775104d..49a6c7a 100644 --- a/tests/db/setInterval.test.ts +++ b/tests/db/setInterval.test.ts @@ -1,23 +1,23 @@ -import { assert } from "../test.deps.ts" -import { createResolver, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { createResolver, useDb } from "../utils.ts"; Deno.test("db - setInterval", async (t) => { await t.step( "Should run callback function given amount of times", async () => { await useDb(async (db) => { - let count1 = 0 - let count2 = 0 - let count3 = 0 + let count1 = 0; + let count2 = 0; + let count3 = 0; - const sleeper1 = createResolver() - const sleeper2 = createResolver() - const sleeper3 = createResolver() + const sleeper1 = createResolver(); + const sleeper2 = createResolver(); + const sleeper3 = createResolver(); const l1 = db.setInterval(() => count1++, 10, { while: ({ count }) => count < 2, onExit: sleeper1.resolve, - }) + }); const l2 = db.setInterval( () => count2++, @@ -26,23 +26,23 @@ Deno.test("db - setInterval", async (t) => { while: ({ first }) => !first, onExit: sleeper2.resolve, }, - ) + ); const l3 = db.setInterval(() => count3++, 10, { while: ({ interval }) => interval <= 0, onExit: sleeper3.resolve, - }) + }); - await sleeper1.promise - await sleeper2.promise - await sleeper3.promise + await sleeper1.promise; + await sleeper2.promise; + await sleeper3.promise; - assert(count1 === 2) - assert(count2 === 0) - assert(count3 === 1) + assert(count1 === 2); + assert(count2 === 0); + assert(count3 === 1); - return async () => await Promise.all([l1, l2, l3]) - }) + return async () => await Promise.all([l1, l2, l3]); + }); }, - ) -}) + ); +}); diff --git a/tests/db/wipe.test.ts b/tests/db/wipe.test.ts index aa56aa0..7bb84c0 100644 --- a/tests/db/wipe.test.ts +++ b/tests/db/wipe.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, generateUsers, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from 
"../models.ts"; +import { generateLargeUsers, generateUsers, useKv } from "../utils.ts"; Deno.test("db - wipe", async (t) => { await t.step( @@ -23,42 +23,42 @@ Deno.test("db - wipe", async (t) => { u64s: collection(model(), { history: true, }), - }) + }); - const users = generateUsers(100) - const largeUsers = generateLargeUsers(100) + const users = generateUsers(100); + const largeUsers = generateLargeUsers(100); const u64s = [ new Deno.KvU64(10n), new Deno.KvU64(20n), - ] + ]; - const cr1 = await db.i_users.addMany(users) - const cr2 = await db.s_users.addMany(largeUsers) - const cr3 = await db.u64s.addMany(u64s) + const cr1 = await db.i_users.addMany(users); + const cr2 = await db.s_users.addMany(largeUsers); + const cr3 = await db.u64s.addMany(u64s); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const { result: docs1 } = await db.i_users.getMany({ limit: 1 }) - const { result: docs2 } = await db.s_users.getMany({ limit: 1 }) - const { result: docs3 } = await db.u64s.getMany({ limit: 1 }) + const { result: docs1 } = await db.i_users.getMany({ limit: 1 }); + const { result: docs2 } = await db.s_users.getMany({ limit: 1 }); + const { result: docs3 } = await db.u64s.getMany({ limit: 1 }); - const count1 = await db.countAll() - assert(count1 === users.length + largeUsers.length + u64s.length) + const count1 = await db.countAll(); + assert(count1 === users.length + largeUsers.length + u64s.length); - await db.wipe() + await db.wipe(); - const count2 = await db.countAll() - const { result: h1 } = await db.i_users.findHistory(docs1[0].id) - const { result: h2 } = await db.s_users.findHistory(docs2[0].id) - const { result: h3 } = await db.u64s.findHistory(docs3[0].id) + const count2 = await db.countAll(); + const { result: h1 } = await db.i_users.findHistory(docs1[0].id); + const { result: h2 } = await db.s_users.findHistory(docs2[0].id); + const { result: h3 } = await db.u64s.findHistory(docs3[0].id); - assert(count2 === 0) - assert(h1.length === 0) - assert(h2.length === 0) - assert(h3.length === 0) - }) + assert(count2 === 0); + assert(h1.length === 0); + assert(h2.length === 0); + assert(h3.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/document/flat.test.ts b/tests/document/flat.test.ts index 6a77331..12bccfb 100644 --- a/tests/document/flat.test.ts +++ b/tests/document/flat.test.ts @@ -1,56 +1,56 @@ -import { Document, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import type { User } from "../models.ts" +import { Document, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import type { User } from "../models.ts"; Deno.test("document - flat", async (t) => { await t.step("Should flatten document of Model type", () => { - const id = "id" - const versionstamp = "000" + const id = "id"; + const versionstamp = "000"; const doc = new Document(model(), { id, versionstamp, value: mockUser1, - }) + }); - const flattened = doc.flat() - assert(flattened.id === id) - assert(flattened.versionstamp === versionstamp) - assert(flattened.username === mockUser1.username) - }) + const flattened = doc.flat(); + assert(flattened.id === id); + assert(flattened.versionstamp === versionstamp); + assert(flattened.username === mockUser1.username); + }); await t.step("Should not flatten document of primitive type", () => { - const id = "id" - const versionstamp = "000" - const value = 100 + const id = "id"; + 
const versionstamp = "000"; + const value = 100; const doc = new Document(model(), { id, versionstamp, value, - }) + }); - const flattened = doc.flat() - assert(flattened.id === id) - assert(flattened.versionstamp === versionstamp) - assert(flattened.value === value) - }) + const flattened = doc.flat(); + assert(flattened.id === id); + assert(flattened.versionstamp === versionstamp); + assert(flattened.value === value); + }); await t.step("Should not flatten document of built-in object type", () => { - const id = "id" - const versionstamp = "000" - const value = new Date() + const id = "id"; + const versionstamp = "000"; + const value = new Date(); const doc = new Document(model(), { id, versionstamp, value, - }) - - const flattened = doc.flat() - assert(flattened.id === id) - assert(flattened.versionstamp === versionstamp) - assert(flattened.value.valueOf() === value.valueOf()) - }) -}) + }); + + const flattened = doc.flat(); + assert(flattened.id === id); + assert(flattened.versionstamp === versionstamp); + assert(flattened.value.valueOf() === value.valueOf()); + }); +}); diff --git a/tests/document/properties.test.ts b/tests/document/properties.test.ts index 87e6a50..75d035b 100644 --- a/tests/document/properties.test.ts +++ b/tests/document/properties.test.ts @@ -1,58 +1,58 @@ -import { Document, model } from "../../mod.ts" -import { assert, z } from "../test.deps.ts" +import { Document, model } from "../../mod.ts"; +import { assert, z } from "../test.deps.ts"; Deno.test("document - properties", async (t) => { await t.step("Should create document with Model", () => { - let assertion = true + let assertion = true; try { new Document(model(), { id: "id", versionstamp: "000", value: 100, - }) + }); } catch (_) { - assertion = false + assertion = false; } - assert(assertion) - }) + assert(assertion); + }); await t.step( "Should parse and create document with ParserModel (zod)", () => { - let assertion = true + let assertion = true; try { new Document(z.number(), { id: "id", versionstamp: "000", value: 100, - }) + }); } catch (_) { - assertion = false + assertion = false; } - assert(assertion) + assert(assertion); }, - ) + ); await t.step( "Should fail to parse and create document with ParserModel (zod)", () => { - let assertion = false + let assertion = false; try { new Document(z.number(), { id: "id", versionstamp: "000", value: "100" as unknown as number, - }) + }); } catch (_) { - assertion = true + assertion = true; } - assert(assertion) + assert(assertion); }, - ) -}) + ); +}); diff --git a/tests/ext/kv.test.ts b/tests/ext/kv.test.ts index cce0bdb..9c57ded 100644 --- a/tests/ext/kv.test.ts +++ b/tests/ext/kv.test.ts @@ -1,129 +1,129 @@ -import { MapKv } from "../../src/ext/kv/map_kv.ts" -import { StorageAdapter } from "../../src/ext/kv/mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { sleep } from "../utils.ts" +import { MapKv } from "../../src/ext/kv/map_kv.ts"; +import { StorageAdapter } from "../../src/ext/kv/mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { sleep } from "../utils.ts"; async function useStore(fn: (store: StorageAdapter) => unknown) { - const store = new StorageAdapter(localStorage) - await fn(store) - store.clear() + const store = new StorageAdapter(localStorage); + await fn(store); + store.clear(); } Deno.test("ext - kv", async (t) => { await t.step("set", async (t) => { await t.step("Should set new entry", () => { - const kv = new MapKv() - const key = ["test"] + const kv = new MapKv(); + const key = ["test"]; - 
const cr = kv.set(key, 10) - const entry = kv.get(key) - assert(cr.ok) - assert(entry.value !== null) - assert(entry.versionstamp !== null) - }) + const cr = kv.set(key, 10); + const entry = kv.get(key); + assert(cr.ok); + assert(entry.value !== null); + assert(entry.versionstamp !== null); + }); await t.step("Should remove new entry after expire time", async () => { - const kv = new MapKv() - const key = ["test"] + const kv = new MapKv(); + const key = ["test"]; - const cr = kv.set(key, 10, { expireIn: 100 }) - const entry1 = kv.get(key) - assert(cr.ok) - assert(entry1.value !== null) - assert(entry1.versionstamp !== null) + const cr = kv.set(key, 10, { expireIn: 100 }); + const entry1 = kv.get(key); + assert(cr.ok); + assert(entry1.value !== null); + assert(entry1.versionstamp !== null); - await sleep(500) + await sleep(500); - const entry2 = kv.get(key) - assert(entry2.value === null) - assert(entry2.versionstamp === null) - }) - }) + const entry2 = kv.get(key); + assert(entry2.value === null); + assert(entry2.versionstamp === null); + }); + }); await t.step("get", async (t) => { await t.step("Should successfully get entry by key", () => { - const kv = new MapKv() - const key = ["test"] - const val = 10 - - const cr = kv.set(key, val) - const entry = kv.get(key) - assert(cr.ok) - assert(entry.value === val) - assert(entry.versionstamp !== null) - }) - }) + const kv = new MapKv(); + const key = ["test"]; + const val = 10; + + const cr = kv.set(key, val); + const entry = kv.get(key); + assert(cr.ok); + assert(entry.value === val); + assert(entry.versionstamp !== null); + }); + }); await t.step("getMany", async (t) => { await t.step("Should successfully get entries by keys", () => { - const kv = new MapKv() + const kv = new MapKv(); const entries = [ [["test", 1], 10], [["test", 2], 20], [["test", 3], 30], - ] + ]; - const crs = entries.map(([key, val]) => kv.set(key as any, val)) - assert(crs.every((cr) => cr.ok)) + const crs = entries.map(([key, val]) => kv.set(key as any, val)); + assert(crs.every((cr) => cr.ok)); - const getEntries = kv.getMany(entries.map(([k]) => k as any)) + const getEntries = kv.getMany(entries.map(([k]) => k as any)); getEntries.forEach((entry) => { - assert(entries.some(([_, val]) => val === entry.value)) - }) - }) - }) + assert(entries.some(([_, val]) => val === entry.value)); + }); + }); + }); await t.step("delete", async (t) => { await t.step("Should successfully delete entry by key", () => { - const kv = new MapKv() - const key = ["test"] + const kv = new MapKv(); + const key = ["test"]; - const cr = kv.set(key, 10) - const entry1 = kv.get(key) - assert(cr.ok) - assert(entry1.value !== null) - assert(entry1.versionstamp !== null) + const cr = kv.set(key, 10); + const entry1 = kv.get(key); + assert(cr.ok); + assert(entry1.value !== null); + assert(entry1.versionstamp !== null); - kv.delete(key) + kv.delete(key); - const entry2 = kv.get(key) - assert(entry2.value === null) - assert(entry2.versionstamp === null) - }) - }) + const entry2 = kv.get(key); + assert(entry2.value === null); + assert(entry2.versionstamp === null); + }); + }); await t.step("list", async (t) => { await t.step("Should list all entries in ascending order", async () => { - const kv = new MapKv() + const kv = new MapKv(); const entries = [ [["test", 1], 10], [["test", 2], 20], [["test", 3], 30], - ] + ]; - const crs = entries.map(([key, val]) => kv.set(key as any, val)) - assert(crs.every((cr) => cr.ok)) + const crs = entries.map(([key, val]) => kv.set(key as any, val)); + 
assert(crs.every((cr) => cr.ok)); - const iter = kv.list({ prefix: [] }) - const listEntries = await Array.fromAsync(iter) + const iter = kv.list({ prefix: [] }); + const listEntries = await Array.fromAsync(iter); listEntries.forEach((entry, i) => { - assert(entry.value === entries[i][1]) - }) - }) - }) + assert(entry.value === entries[i][1]); + }); + }); + }); await t.step("storage_adapter (localStorage)", async (t) => { await t.step("Should set and get new entry", async () => { await useStore((store) => { - const key = "key" - const val = 10 - store.set(key, val) - const item = store.get(key) - assertEquals(val, item) - }) - }) + const key = "key"; + const val = 10; + store.set(key, val); + const item = store.get(key); + assertEquals(val, item); + }); + }); await t.step("Should get all entries", async () => { await useStore((store) => { @@ -133,33 +133,33 @@ Deno.test("ext - kv", async (t) => { ["3", 30], ["4", 40], ["5", 50], - ] as const + ] as const; for (const [key, val] of entries) { - store.set(key, val) + store.set(key, val); } - const storeEntries = Array.from(store.entries()) - assertEquals(entries.length, storeEntries.length) + const storeEntries = Array.from(store.entries()); + assertEquals(entries.length, storeEntries.length); for (const [key, val] of storeEntries) { - assert(entries.some(([k, v]) => k === key && v === val)) + assert(entries.some(([k, v]) => k === key && v === val)); } - }) - }) + }); + }); await t.step("Should delete entry by key", async () => { await useStore((store) => { - const key = "key" - const val = 10 - store.set(key, val) - const item1 = store.get(key) - assertEquals(item1, val) - store.delete(key) - const item2 = store.get(key) - assertEquals(item2, undefined) - }) - }) + const key = "key"; + const val = 10; + store.set(key, val); + const item1 = store.get(key); + assertEquals(item1, val); + store.delete(key); + const item2 = store.get(key); + assertEquals(item2, undefined); + }); + }); await t.step("Should delete entry by key", async () => { await useStore((store) => { @@ -169,19 +169,19 @@ Deno.test("ext - kv", async (t) => { ["3", 30], ["4", 40], ["5", 50], - ] as const + ] as const; for (const [key, val] of entries) { - store.set(key, val) + store.set(key, val); } - const storeEntries1 = Array.from(store.entries()) - assertEquals(storeEntries1.length, entries.length) + const storeEntries1 = Array.from(store.entries()); + assertEquals(storeEntries1.length, entries.length); - store.clear() - const storeEntries2 = Array.from(store.entries()) - assertEquals(storeEntries2.length, 0) - }) - }) - }) -}) + store.clear(); + const storeEntries2 = Array.from(store.entries()); + assertEquals(storeEntries2.length, 0); + }); + }); + }); +}); diff --git a/tests/ext/migrate.test.ts b/tests/ext/migrate.test.ts index e36da16..e6b99d6 100644 --- a/tests/ext/migrate.test.ts +++ b/tests/ext/migrate.test.ts @@ -1,8 +1,8 @@ -import { migrate } from "../../src/ext/migrate/mod.ts" -import { collection } from "../../src/collection.ts" -import { kvdex } from "../../src/kvdex.ts" -import { model } from "../../src/model.ts" -import { assert, assertEquals } from "../test.deps.ts" +import { migrate } from "../../src/ext/migrate/mod.ts"; +import { collection } from "../../src/collection.ts"; +import { kvdex } from "../../src/kvdex.ts"; +import { model } from "../../src/model.ts"; +import { assert, assertEquals } from "../test.deps.ts"; import { TArray, TBigint, @@ -33,7 +33,7 @@ import { TUint8Array, TUint8ClampedArray, TUndefined, -} from "../values.ts" +} from 
"../values.ts"; function createDb(kv: Deno.Kv) { return kvdex(kv, { @@ -102,666 +102,682 @@ function createDb(kv: Deno.Kv) { serialize: "json", indices: { TString: "primary", TNumber: "secondary" }, }), - }) + }); } Deno.test("ext - migrate", async (t) => { await t.step("Should only migrate kvdex entries", async () => { - const temp = await Deno.makeTempFile({ suffix: ".sqlite3" }) - using sourceKv = await Deno.openKv(":memory:") - using targetKv = await Deno.openKv(temp) + const temp = await Deno.makeTempFile({ suffix: ".sqlite3" }); + using sourceKv = await Deno.openKv(":memory:"); + using targetKv = await Deno.openKv(temp); - const check_cr = await sourceKv.set(["check"], "check") - assert(check_cr.ok) + const check_cr = await sourceKv.set(["check"], "check"); + assert(check_cr.ok); - const sourceDb = createDb(sourceKv) + const sourceDb = createDb(sourceKv); - const c_TUndefined_cr = await sourceDb.c_TUndefined.add(TUndefined) - const c_TNull_cr = await sourceDb.c_TNull.add(TNull) - const c_TNaN_cr = await sourceDb.c_TNaN.add(TNaN) - const c_TInfinity_cr = await sourceDb.c_TInfinity.add(TInfinity) - const c_TNumber_cr = await sourceDb.c_TNumber.add(TNumber) - const c_TString_cr = await sourceDb.c_TString.add(TString) - const c_TKvU64_cr = await sourceDb.c_TKvU64.add(TKvU64) - const c_TBoolean_cr = await sourceDb.c_TBoolean.add(TBoolean) - const c_TBigint_cr = await sourceDb.c_TBigint.add(TBigint) - const c_TInt8Array_cr = await sourceDb.c_TInt8Array.add(TInt8Array) - const c_TInt16Array_cr = await sourceDb.c_TInt16Array.add(TInt16Array) - const c_TInt32Array_cr = await sourceDb.c_TInt32Array.add(TInt32Array) + const c_TUndefined_cr = await sourceDb.c_TUndefined.add(TUndefined); + const c_TNull_cr = await sourceDb.c_TNull.add(TNull); + const c_TNaN_cr = await sourceDb.c_TNaN.add(TNaN); + const c_TInfinity_cr = await sourceDb.c_TInfinity.add(TInfinity); + const c_TNumber_cr = await sourceDb.c_TNumber.add(TNumber); + const c_TString_cr = await sourceDb.c_TString.add(TString); + const c_TKvU64_cr = await sourceDb.c_TKvU64.add(TKvU64); + const c_TBoolean_cr = await sourceDb.c_TBoolean.add(TBoolean); + const c_TBigint_cr = await sourceDb.c_TBigint.add(TBigint); + const c_TInt8Array_cr = await sourceDb.c_TInt8Array.add(TInt8Array); + const c_TInt16Array_cr = await sourceDb.c_TInt16Array.add(TInt16Array); + const c_TInt32Array_cr = await sourceDb.c_TInt32Array.add(TInt32Array); const c_TBigInt64Array_cr = await sourceDb.c_TBigInt64Array.add( TBigInt64Array, - ) - const c_TUint8Array_cr = await sourceDb.c_TUint8Array.add(TUint8Array) - const c_TUint16Array_cr = await sourceDb.c_TUint16Array.add(TUint16Array) - const c_TUint32Array_cr = await sourceDb.c_TUint32Array.add(TUint32Array) + ); + const c_TUint8Array_cr = await sourceDb.c_TUint8Array.add(TUint8Array); + const c_TUint16Array_cr = await sourceDb.c_TUint16Array.add(TUint16Array); + const c_TUint32Array_cr = await sourceDb.c_TUint32Array.add(TUint32Array); const c_TBigUint64Array_cr = await sourceDb.c_TBigUint64Array.add( TBigUint64Array, - ) + ); const c_TUint8ClampedArray_cr = await sourceDb.c_TUint8ClampedArray.add( TUint8ClampedArray, - ) - const c_TFloat32Array_cr = await sourceDb.c_TFloat32Array.add(TFloat32Array) - const c_TFloat64Array_cr = await sourceDb.c_TFloat64Array.add(TFloat64Array) - const c_TBuffer_cr = await sourceDb.c_TBuffer.add(TBuffer) - const c_TDataView_cr = await sourceDb.c_TDataView.add(TDataView) - const c_TDate_cr = await sourceDb.c_TDate.add(TDate) - const c_TRegExp_cr = await sourceDb.c_TRegExp.add(TRegExp) - 
const c_TError_cr = await sourceDb.c_TError.add(TError) - const c_TArray_cr = await sourceDb.c_TArray.add(TArray) - const c_TObject_cr = await sourceDb.c_TObject.add(TObject) - const c_TSet_cr = await sourceDb.c_TSet.add(TSet) - const c_TMap_cr = await sourceDb.c_TMap.add(TMap) - const s_TUndefined_cr = await sourceDb.s_TUndefined.add(TUndefined) - const s_TNull_cr = await sourceDb.s_TNull.add(TNull) - const s_TNaN_cr = await sourceDb.s_TNaN.add(TNaN) - const s_TInfinity_cr = await sourceDb.s_TInfinity.add(TInfinity) - const s_TNumber_cr = await sourceDb.s_TNumber.add(TNumber) - const s_TString_cr = await sourceDb.s_TString.add(TString) - const s_TKvU64_cr = await sourceDb.s_TKvU64.add(TKvU64) - const s_TBoolean_cr = await sourceDb.s_TBoolean.add(TBoolean) - const s_TBigint_cr = await sourceDb.s_TBigint.add(TBigint) - const s_TInt8Array_cr = await sourceDb.s_TInt8Array.add(TInt8Array) - const s_TInt16Array_cr = await sourceDb.s_TInt16Array.add(TInt16Array) - const s_TInt32Array_cr = await sourceDb.s_TInt32Array.add(TInt32Array) + ); + const c_TFloat32Array_cr = await sourceDb.c_TFloat32Array.add( + TFloat32Array, + ); + const c_TFloat64Array_cr = await sourceDb.c_TFloat64Array.add( + TFloat64Array, + ); + const c_TBuffer_cr = await sourceDb.c_TBuffer.add(TBuffer); + const c_TDataView_cr = await sourceDb.c_TDataView.add(TDataView); + const c_TDate_cr = await sourceDb.c_TDate.add(TDate); + const c_TRegExp_cr = await sourceDb.c_TRegExp.add(TRegExp); + const c_TError_cr = await sourceDb.c_TError.add(TError); + const c_TArray_cr = await sourceDb.c_TArray.add(TArray); + const c_TObject_cr = await sourceDb.c_TObject.add(TObject); + const c_TSet_cr = await sourceDb.c_TSet.add(TSet); + const c_TMap_cr = await sourceDb.c_TMap.add(TMap); + const s_TUndefined_cr = await sourceDb.s_TUndefined.add(TUndefined); + const s_TNull_cr = await sourceDb.s_TNull.add(TNull); + const s_TNaN_cr = await sourceDb.s_TNaN.add(TNaN); + const s_TInfinity_cr = await sourceDb.s_TInfinity.add(TInfinity); + const s_TNumber_cr = await sourceDb.s_TNumber.add(TNumber); + const s_TString_cr = await sourceDb.s_TString.add(TString); + const s_TKvU64_cr = await sourceDb.s_TKvU64.add(TKvU64); + const s_TBoolean_cr = await sourceDb.s_TBoolean.add(TBoolean); + const s_TBigint_cr = await sourceDb.s_TBigint.add(TBigint); + const s_TInt8Array_cr = await sourceDb.s_TInt8Array.add(TInt8Array); + const s_TInt16Array_cr = await sourceDb.s_TInt16Array.add(TInt16Array); + const s_TInt32Array_cr = await sourceDb.s_TInt32Array.add(TInt32Array); const s_TBigInt64Array_cr = await sourceDb.s_TBigInt64Array.add( TBigInt64Array, - ) - const s_TUint8Array_cr = await sourceDb.s_TUint8Array.add(TUint8Array) - const s_TUint16Array_cr = await sourceDb.s_TUint16Array.add(TUint16Array) - const s_TUint32Array_cr = await sourceDb.s_TUint32Array.add(TUint32Array) + ); + const s_TUint8Array_cr = await sourceDb.s_TUint8Array.add(TUint8Array); + const s_TUint16Array_cr = await sourceDb.s_TUint16Array.add(TUint16Array); + const s_TUint32Array_cr = await sourceDb.s_TUint32Array.add(TUint32Array); const s_TBigUint64Array_cr = await sourceDb.s_TBigUint64Array.add( TBigUint64Array, - ) + ); const s_TUint8ClampedArray_cr = await sourceDb.s_TUint8ClampedArray.add( TUint8ClampedArray, - ) - const s_TFloat32Array_cr = await sourceDb.s_TFloat32Array.add(TFloat32Array) - const s_TFloat64Array_cr = await sourceDb.s_TFloat64Array.add(TFloat64Array) - const s_TBuffer_cr = await sourceDb.s_TBuffer.add(TBuffer) - const s_TDataView_cr = await sourceDb.s_TDataView.add(TDataView) - 
const s_TDate_cr = await sourceDb.s_TDate.add(TDate) - const s_TRegExp_cr = await sourceDb.s_TRegExp.add(TRegExp) - const s_TError_cr = await sourceDb.s_TError.add(TError) - const s_TArray_cr = await sourceDb.s_TArray.add(TArray) - const s_TObject_cr = await sourceDb.s_TObject.add(TObject) - const s_TSet_cr = await sourceDb.s_TSet.add(TSet) - const s_TMap_cr = await sourceDb.s_TMap.add(TMap) - const i_cr = await sourceDb.i.add(TObject) - const is_cr = await sourceDb.is.add(TObject) + ); + const s_TFloat32Array_cr = await sourceDb.s_TFloat32Array.add( + TFloat32Array, + ); + const s_TFloat64Array_cr = await sourceDb.s_TFloat64Array.add( + TFloat64Array, + ); + const s_TBuffer_cr = await sourceDb.s_TBuffer.add(TBuffer); + const s_TDataView_cr = await sourceDb.s_TDataView.add(TDataView); + const s_TDate_cr = await sourceDb.s_TDate.add(TDate); + const s_TRegExp_cr = await sourceDb.s_TRegExp.add(TRegExp); + const s_TError_cr = await sourceDb.s_TError.add(TError); + const s_TArray_cr = await sourceDb.s_TArray.add(TArray); + const s_TObject_cr = await sourceDb.s_TObject.add(TObject); + const s_TSet_cr = await sourceDb.s_TSet.add(TSet); + const s_TMap_cr = await sourceDb.s_TMap.add(TMap); + const i_cr = await sourceDb.i.add(TObject); + const is_cr = await sourceDb.is.add(TObject); - assert(c_TUndefined_cr.ok) - assert(c_TNull_cr.ok) - assert(c_TNaN_cr.ok) - assert(c_TInfinity_cr.ok) - assert(c_TString_cr.ok) - assert(c_TNumber_cr.ok) - assert(c_TBigint_cr.ok) - assert(c_TBoolean_cr.ok) - assert(c_TKvU64_cr.ok) - assert(c_TUint8Array_cr.ok) - assert(c_TUint16Array_cr.ok) - assert(c_TUint32Array_cr.ok) - assert(c_TBigUint64Array_cr.ok) - assert(c_TUint8ClampedArray_cr.ok) - assert(c_TInt8Array_cr.ok) - assert(c_TInt16Array_cr.ok) - assert(c_TInt32Array_cr.ok) - assert(c_TBigInt64Array_cr.ok) - assert(c_TBuffer_cr.ok) - assert(c_TDataView_cr.ok) - assert(c_TDate_cr.ok) - assert(c_TError_cr.ok) - assert(c_TRegExp_cr.ok) - assert(c_TFloat32Array_cr.ok) - assert(c_TFloat64Array_cr.ok) - assert(c_TArray_cr.ok) - assert(c_TObject_cr.ok) - assert(c_TSet_cr.ok) - assert(c_TMap_cr.ok) - assert(s_TUndefined_cr.ok) - assert(s_TNull_cr.ok) - assert(s_TNaN_cr.ok) - assert(s_TInfinity_cr.ok) - assert(s_TString_cr.ok) - assert(s_TNumber_cr.ok) - assert(s_TBigint_cr.ok) - assert(s_TBoolean_cr.ok) - assert(s_TKvU64_cr.ok) - assert(s_TUint8Array_cr.ok) - assert(s_TUint16Array_cr.ok) - assert(s_TUint32Array_cr.ok) - assert(s_TBigUint64Array_cr.ok) - assert(s_TUint8ClampedArray_cr.ok) - assert(s_TInt8Array_cr.ok) - assert(s_TInt16Array_cr.ok) - assert(s_TInt32Array_cr.ok) - assert(s_TBigInt64Array_cr.ok) - assert(s_TBuffer_cr.ok) - assert(s_TDataView_cr.ok) - assert(s_TDate_cr.ok) - assert(s_TError_cr.ok) - assert(s_TRegExp_cr.ok) - assert(s_TFloat32Array_cr.ok) - assert(s_TFloat64Array_cr.ok) - assert(s_TArray_cr.ok) - assert(s_TObject_cr.ok) - assert(s_TSet_cr.ok) - assert(s_TMap_cr.ok) - assert(i_cr.ok) - assert(is_cr.ok) + assert(c_TUndefined_cr.ok); + assert(c_TNull_cr.ok); + assert(c_TNaN_cr.ok); + assert(c_TInfinity_cr.ok); + assert(c_TString_cr.ok); + assert(c_TNumber_cr.ok); + assert(c_TBigint_cr.ok); + assert(c_TBoolean_cr.ok); + assert(c_TKvU64_cr.ok); + assert(c_TUint8Array_cr.ok); + assert(c_TUint16Array_cr.ok); + assert(c_TUint32Array_cr.ok); + assert(c_TBigUint64Array_cr.ok); + assert(c_TUint8ClampedArray_cr.ok); + assert(c_TInt8Array_cr.ok); + assert(c_TInt16Array_cr.ok); + assert(c_TInt32Array_cr.ok); + assert(c_TBigInt64Array_cr.ok); + assert(c_TBuffer_cr.ok); + assert(c_TDataView_cr.ok); + 
assert(c_TDate_cr.ok); + assert(c_TError_cr.ok); + assert(c_TRegExp_cr.ok); + assert(c_TFloat32Array_cr.ok); + assert(c_TFloat64Array_cr.ok); + assert(c_TArray_cr.ok); + assert(c_TObject_cr.ok); + assert(c_TSet_cr.ok); + assert(c_TMap_cr.ok); + assert(s_TUndefined_cr.ok); + assert(s_TNull_cr.ok); + assert(s_TNaN_cr.ok); + assert(s_TInfinity_cr.ok); + assert(s_TString_cr.ok); + assert(s_TNumber_cr.ok); + assert(s_TBigint_cr.ok); + assert(s_TBoolean_cr.ok); + assert(s_TKvU64_cr.ok); + assert(s_TUint8Array_cr.ok); + assert(s_TUint16Array_cr.ok); + assert(s_TUint32Array_cr.ok); + assert(s_TBigUint64Array_cr.ok); + assert(s_TUint8ClampedArray_cr.ok); + assert(s_TInt8Array_cr.ok); + assert(s_TInt16Array_cr.ok); + assert(s_TInt32Array_cr.ok); + assert(s_TBigInt64Array_cr.ok); + assert(s_TBuffer_cr.ok); + assert(s_TDataView_cr.ok); + assert(s_TDate_cr.ok); + assert(s_TError_cr.ok); + assert(s_TRegExp_cr.ok); + assert(s_TFloat32Array_cr.ok); + assert(s_TFloat64Array_cr.ok); + assert(s_TArray_cr.ok); + assert(s_TObject_cr.ok); + assert(s_TSet_cr.ok); + assert(s_TMap_cr.ok); + assert(i_cr.ok); + assert(is_cr.ok); await migrate({ source: sourceKv, target: targetKv, - }) + }); - const targetDb = createDb(targetKv) + const targetDb = createDb(targetKv); const c_TUndefined_doc = await targetDb.c_TUndefined.find( c_TUndefined_cr.id, - ) - const c_TNull_doc = await targetDb.c_TNull.find(c_TNull_cr.id) - const c_TNaN_doc = await targetDb.c_TNaN.find(c_TNaN_cr.id) - const c_TInfinity_doc = await targetDb.c_TInfinity.find(c_TInfinity_cr.id) - const c_TString_doc = await targetDb.c_TString.find(c_TString_cr.id) - const c_TNumber_doc = await targetDb.c_TNumber.find(c_TNumber_cr.id) - const c_TBigint_doc = await targetDb.c_TBigint.find(c_TBigint_cr.id) - const c_TKvU64_doc = await targetDb.c_TKvU64.find(c_TKvU64_cr.id) - const c_TBoolean_doc = await targetDb.c_TBoolean.find(c_TBoolean_cr.id) + ); + const c_TNull_doc = await targetDb.c_TNull.find(c_TNull_cr.id); + const c_TNaN_doc = await targetDb.c_TNaN.find(c_TNaN_cr.id); + const c_TInfinity_doc = await targetDb.c_TInfinity.find(c_TInfinity_cr.id); + const c_TString_doc = await targetDb.c_TString.find(c_TString_cr.id); + const c_TNumber_doc = await targetDb.c_TNumber.find(c_TNumber_cr.id); + const c_TBigint_doc = await targetDb.c_TBigint.find(c_TBigint_cr.id); + const c_TKvU64_doc = await targetDb.c_TKvU64.find(c_TKvU64_cr.id); + const c_TBoolean_doc = await targetDb.c_TBoolean.find(c_TBoolean_cr.id); const c_TInt8Array_doc = await targetDb.c_TInt8Array.find( c_TInt8Array_cr.id, - ) + ); const c_TInt16Array_doc = await targetDb.c_TInt16Array.find( c_TInt16Array_cr.id, - ) + ); const c_TInt32Array_doc = await targetDb.c_TInt32Array.find( c_TInt32Array_cr.id, - ) + ); const c_TBigInt64Array_doc = await targetDb.c_TBigInt64Array.find( c_TBigInt64Array_cr.id, - ) + ); const c_TUint8Array_doc = await targetDb.c_TUint8Array.find( c_TUint8Array_cr.id, - ) + ); const c_TUint16Array_doc = await targetDb.c_TUint16Array.find( c_TUint16Array_cr.id, - ) + ); const c_TUint32Array_doc = await targetDb.c_TUint32Array.find( c_TUint32Array_cr.id, - ) + ); const c_TBigUint64Array_doc = await targetDb.c_TBigUint64Array.find( c_TBigUint64Array_cr.id, - ) + ); const c_TUint8ClampedArray_doc = await targetDb.c_TUint8ClampedArray.find( c_TUint8ClampedArray_cr.id, - ) + ); const c_TFloat32Array_doc = await targetDb.c_TFloat32Array.find( c_TFloat32Array_cr.id, - ) + ); const c_TFloat64Array_doc = await targetDb.c_TFloat64Array.find( c_TFloat64Array_cr.id, - ) - const c_TBuffer_doc = 
await targetDb.c_TBuffer.find(c_TBuffer_cr.id) - const c_TDataView_doc = await targetDb.c_TDataView.find(c_TDataView_cr.id) - const c_TDate_doc = await targetDb.c_TDate.find(c_TDate_cr.id) - const c_TError_doc = await targetDb.c_TError.find(c_TError_cr.id) - const c_TRegExp_doc = await targetDb.c_TRegExp.find(c_TRegExp_cr.id) - const c_TArray_doc = await targetDb.c_TArray.find(c_TArray_cr.id) - const c_TObject_doc = await targetDb.c_TObject.find(c_TObject_cr.id) - const c_TSet_doc = await targetDb.c_TSet.find(c_TSet_cr.id) - const c_TMap_doc = await targetDb.c_TMap.find(c_TMap_cr.id) + ); + const c_TBuffer_doc = await targetDb.c_TBuffer.find(c_TBuffer_cr.id); + const c_TDataView_doc = await targetDb.c_TDataView.find(c_TDataView_cr.id); + const c_TDate_doc = await targetDb.c_TDate.find(c_TDate_cr.id); + const c_TError_doc = await targetDb.c_TError.find(c_TError_cr.id); + const c_TRegExp_doc = await targetDb.c_TRegExp.find(c_TRegExp_cr.id); + const c_TArray_doc = await targetDb.c_TArray.find(c_TArray_cr.id); + const c_TObject_doc = await targetDb.c_TObject.find(c_TObject_cr.id); + const c_TSet_doc = await targetDb.c_TSet.find(c_TSet_cr.id); + const c_TMap_doc = await targetDb.c_TMap.find(c_TMap_cr.id); const s_TUndefined_doc = await targetDb.s_TUndefined.find( s_TUndefined_cr.id, - ) - const s_TNull_doc = await targetDb.s_TNull.find(s_TNull_cr.id) - const s_TNaN_doc = await targetDb.s_TNaN.find(s_TNaN_cr.id) - const s_TInfinity_doc = await targetDb.s_TInfinity.find(s_TInfinity_cr.id) - const s_TString_doc = await targetDb.s_TString.find(s_TString_cr.id) - const s_TNumber_doc = await targetDb.s_TNumber.find(s_TNumber_cr.id) - const s_TBigint_doc = await targetDb.s_TBigint.find(s_TBigint_cr.id) - const s_TKvU64_doc = await targetDb.s_TKvU64.find(s_TKvU64_cr.id) - const s_TBoolean_doc = await targetDb.s_TBoolean.find(s_TBoolean_cr.id) + ); + const s_TNull_doc = await targetDb.s_TNull.find(s_TNull_cr.id); + const s_TNaN_doc = await targetDb.s_TNaN.find(s_TNaN_cr.id); + const s_TInfinity_doc = await targetDb.s_TInfinity.find(s_TInfinity_cr.id); + const s_TString_doc = await targetDb.s_TString.find(s_TString_cr.id); + const s_TNumber_doc = await targetDb.s_TNumber.find(s_TNumber_cr.id); + const s_TBigint_doc = await targetDb.s_TBigint.find(s_TBigint_cr.id); + const s_TKvU64_doc = await targetDb.s_TKvU64.find(s_TKvU64_cr.id); + const s_TBoolean_doc = await targetDb.s_TBoolean.find(s_TBoolean_cr.id); const s_TInt8Array_doc = await targetDb.s_TInt8Array.find( s_TInt8Array_cr.id, - ) + ); const s_TInt16Array_doc = await targetDb.s_TInt16Array.find( s_TInt16Array_cr.id, - ) + ); const s_TInt32Array_doc = await targetDb.s_TInt32Array.find( s_TInt32Array_cr.id, - ) + ); const s_TBigInt64Array_doc = await targetDb.s_TBigInt64Array.find( s_TBigInt64Array_cr.id, - ) + ); const s_TUint8Array_doc = await targetDb.s_TUint8Array.find( s_TUint8Array_cr.id, - ) + ); const s_TUint16Array_doc = await targetDb.s_TUint16Array.find( s_TUint16Array_cr.id, - ) + ); const s_TUint32Array_doc = await targetDb.s_TUint32Array.find( s_TUint32Array_cr.id, - ) + ); const s_TBigUint64Array_doc = await targetDb.s_TBigUint64Array.find( s_TBigUint64Array_cr.id, - ) + ); const s_TUint8ClampedArray_doc = await targetDb.s_TUint8ClampedArray.find( s_TUint8ClampedArray_cr.id, - ) + ); const s_TFloat32Array_doc = await targetDb.s_TFloat32Array.find( s_TFloat32Array_cr.id, - ) + ); const s_TFloat64Array_doc = await targetDb.s_TFloat64Array.find( s_TFloat64Array_cr.id, - ) - const s_TBuffer_doc = await 
targetDb.s_TBuffer.find(s_TBuffer_cr.id) - const s_TDataView_doc = await targetDb.s_TDataView.find(s_TDataView_cr.id) - const s_TDate_doc = await targetDb.s_TDate.find(s_TDate_cr.id) - const s_TError_doc = await targetDb.s_TError.find(s_TError_cr.id) - const s_TRegExp_doc = await targetDb.s_TRegExp.find(s_TRegExp_cr.id) - const s_TArray_doc = await targetDb.s_TArray.find(s_TArray_cr.id) - const s_TObject_doc = await targetDb.s_TObject.find(s_TObject_cr.id) - const s_TSet_doc = await targetDb.s_TSet.find(s_TSet_cr.id) - const s_TMap_doc = await targetDb.s_TMap.find(s_TMap_cr.id) - const i_doc = await targetDb.i.find(i_cr.id) - const is_doc = await targetDb.is.find(is_cr.id) - const check = await targetKv.get(["check"]) + ); + const s_TBuffer_doc = await targetDb.s_TBuffer.find(s_TBuffer_cr.id); + const s_TDataView_doc = await targetDb.s_TDataView.find(s_TDataView_cr.id); + const s_TDate_doc = await targetDb.s_TDate.find(s_TDate_cr.id); + const s_TError_doc = await targetDb.s_TError.find(s_TError_cr.id); + const s_TRegExp_doc = await targetDb.s_TRegExp.find(s_TRegExp_cr.id); + const s_TArray_doc = await targetDb.s_TArray.find(s_TArray_cr.id); + const s_TObject_doc = await targetDb.s_TObject.find(s_TObject_cr.id); + const s_TSet_doc = await targetDb.s_TSet.find(s_TSet_cr.id); + const s_TMap_doc = await targetDb.s_TMap.find(s_TMap_cr.id); + const i_doc = await targetDb.i.find(i_cr.id); + const is_doc = await targetDb.is.find(is_cr.id); + const check = await targetKv.get(["check"]); - assertEquals(c_TUndefined_doc?.value, TUndefined) - assertEquals(c_TNull_doc?.value, TNull) - assertEquals(c_TNaN_doc?.value, TNaN) - assertEquals(c_TInfinity_doc?.value, TInfinity) - assertEquals(c_TString_doc?.value, TString) - assertEquals(c_TNumber_doc?.value, TNumber) - assertEquals(c_TBigint_doc?.value, TBigint) - assertEquals(c_TBoolean_doc?.value, TBoolean) - assertEquals(c_TKvU64_doc?.value, TKvU64) - assertEquals(c_TInt8Array_doc?.value, TInt8Array) - assertEquals(c_TInt16Array_doc?.value, TInt16Array) - assertEquals(c_TInt32Array_doc?.value, TInt32Array) - assertEquals(c_TBigInt64Array_doc?.value, TBigInt64Array) - assertEquals(c_TUint8Array_doc?.value, TUint8Array) - assertEquals(c_TUint16Array_doc?.value, TUint16Array) - assertEquals(c_TUint32Array_doc?.value, TUint32Array) - assertEquals(c_TBigUint64Array_doc?.value, TBigUint64Array) - assertEquals(c_TUint8ClampedArray_doc?.value, TUint8ClampedArray) - assertEquals(c_TFloat32Array_doc?.value, TFloat32Array) - assertEquals(c_TFloat64Array_doc?.value, TFloat64Array) - assertEquals(c_TBuffer_doc?.value, TBuffer) - assertEquals(c_TDataView_doc?.value, TDataView) - assertEquals(c_TDate_doc?.value, TDate) - assertEquals(c_TError_doc?.value, TError) - assertEquals(c_TRegExp_doc?.value, TRegExp) - assertEquals(c_TArray_doc?.value, TArray) - assertEquals(c_TObject_doc?.value, TObject) - assertEquals(c_TSet_doc?.value, TSet) - assertEquals(c_TMap_doc?.value, TMap) - assertEquals(s_TUndefined_doc?.value, TUndefined) - assertEquals(s_TNull_doc?.value, TNull) - assertEquals(s_TNaN_doc?.value, TNaN) - assertEquals(s_TInfinity_doc?.value, TInfinity) - assertEquals(s_TString_doc?.value, TString) - assertEquals(s_TNumber_doc?.value, TNumber) - assertEquals(s_TBigint_doc?.value, TBigint) - assertEquals(s_TBoolean_doc?.value, TBoolean) - assertEquals(s_TKvU64_doc?.value, TKvU64) - assertEquals(s_TInt8Array_doc?.value, TInt8Array) - assertEquals(s_TInt16Array_doc?.value, TInt16Array) - assertEquals(s_TInt32Array_doc?.value, TInt32Array) - 
assertEquals(s_TBigInt64Array_doc?.value, TBigInt64Array) - assertEquals(s_TUint8Array_doc?.value, TUint8Array) - assertEquals(s_TUint16Array_doc?.value, TUint16Array) - assertEquals(s_TUint32Array_doc?.value, TUint32Array) - assertEquals(s_TBigUint64Array_doc?.value, TBigUint64Array) - assertEquals(s_TUint8ClampedArray_doc?.value, TUint8ClampedArray) - assertEquals(s_TFloat32Array_doc?.value, TFloat32Array) - assertEquals(s_TFloat64Array_doc?.value, TFloat64Array) - assertEquals(s_TBuffer_doc?.value, TBuffer) - assertEquals(s_TDataView_doc?.value, TDataView) - assertEquals(s_TDate_doc?.value, TDate) - assertEquals(s_TError_doc?.value, TError) - assertEquals(s_TRegExp_doc?.value, TRegExp) - assertEquals(s_TArray_doc?.value, TArray) - assertEquals(s_TObject_doc?.value, TObject) - assertEquals(s_TSet_doc?.value, TSet) - assertEquals(s_TMap_doc?.value, TMap) - assertEquals(i_doc?.value, TObject) - assertEquals(is_doc?.value, TObject) - assert(!check.versionstamp && !check.value) + assertEquals(c_TUndefined_doc?.value, TUndefined); + assertEquals(c_TNull_doc?.value, TNull); + assertEquals(c_TNaN_doc?.value, TNaN); + assertEquals(c_TInfinity_doc?.value, TInfinity); + assertEquals(c_TString_doc?.value, TString); + assertEquals(c_TNumber_doc?.value, TNumber); + assertEquals(c_TBigint_doc?.value, TBigint); + assertEquals(c_TBoolean_doc?.value, TBoolean); + assertEquals(c_TKvU64_doc?.value, TKvU64); + assertEquals(c_TInt8Array_doc?.value, TInt8Array); + assertEquals(c_TInt16Array_doc?.value, TInt16Array); + assertEquals(c_TInt32Array_doc?.value, TInt32Array); + assertEquals(c_TBigInt64Array_doc?.value, TBigInt64Array); + assertEquals(c_TUint8Array_doc?.value, TUint8Array); + assertEquals(c_TUint16Array_doc?.value, TUint16Array); + assertEquals(c_TUint32Array_doc?.value, TUint32Array); + assertEquals(c_TBigUint64Array_doc?.value, TBigUint64Array); + assertEquals(c_TUint8ClampedArray_doc?.value, TUint8ClampedArray); + assertEquals(c_TFloat32Array_doc?.value, TFloat32Array); + assertEquals(c_TFloat64Array_doc?.value, TFloat64Array); + assertEquals(c_TBuffer_doc?.value, TBuffer); + assertEquals(c_TDataView_doc?.value, TDataView); + assertEquals(c_TDate_doc?.value, TDate); + assertEquals(c_TError_doc?.value, TError); + assertEquals(c_TRegExp_doc?.value, TRegExp); + assertEquals(c_TArray_doc?.value, TArray); + assertEquals(c_TObject_doc?.value, TObject); + assertEquals(c_TSet_doc?.value, TSet); + assertEquals(c_TMap_doc?.value, TMap); + assertEquals(s_TUndefined_doc?.value, TUndefined); + assertEquals(s_TNull_doc?.value, TNull); + assertEquals(s_TNaN_doc?.value, TNaN); + assertEquals(s_TInfinity_doc?.value, TInfinity); + assertEquals(s_TString_doc?.value, TString); + assertEquals(s_TNumber_doc?.value, TNumber); + assertEquals(s_TBigint_doc?.value, TBigint); + assertEquals(s_TBoolean_doc?.value, TBoolean); + assertEquals(s_TKvU64_doc?.value, TKvU64); + assertEquals(s_TInt8Array_doc?.value, TInt8Array); + assertEquals(s_TInt16Array_doc?.value, TInt16Array); + assertEquals(s_TInt32Array_doc?.value, TInt32Array); + assertEquals(s_TBigInt64Array_doc?.value, TBigInt64Array); + assertEquals(s_TUint8Array_doc?.value, TUint8Array); + assertEquals(s_TUint16Array_doc?.value, TUint16Array); + assertEquals(s_TUint32Array_doc?.value, TUint32Array); + assertEquals(s_TBigUint64Array_doc?.value, TBigUint64Array); + assertEquals(s_TUint8ClampedArray_doc?.value, TUint8ClampedArray); + assertEquals(s_TFloat32Array_doc?.value, TFloat32Array); + assertEquals(s_TFloat64Array_doc?.value, TFloat64Array); + 
assertEquals(s_TBuffer_doc?.value, TBuffer); + assertEquals(s_TDataView_doc?.value, TDataView); + assertEquals(s_TDate_doc?.value, TDate); + assertEquals(s_TError_doc?.value, TError); + assertEquals(s_TRegExp_doc?.value, TRegExp); + assertEquals(s_TArray_doc?.value, TArray); + assertEquals(s_TObject_doc?.value, TObject); + assertEquals(s_TSet_doc?.value, TSet); + assertEquals(s_TMap_doc?.value, TMap); + assertEquals(i_doc?.value, TObject); + assertEquals(is_doc?.value, TObject); + assert(!check.versionstamp && !check.value); - sourceKv.close() - targetKv.close() + sourceKv.close(); + targetKv.close(); - await Deno.remove(temp) - }) + await Deno.remove(temp); + }); await t.step("Should migrate all entries", async () => { - const temp = await Deno.makeTempFile({ suffix: ".sqlite3" }) - using sourceKv = await Deno.openKv(":memory:") - using targetKv = await Deno.openKv(temp) + const temp = await Deno.makeTempFile({ suffix: ".sqlite3" }); + using sourceKv = await Deno.openKv(":memory:"); + using targetKv = await Deno.openKv(temp); - const check_cr = await sourceKv.set(["check"], "check") - assert(check_cr.ok) + const check_cr = await sourceKv.set(["check"], "check"); + assert(check_cr.ok); - const sourceDb = createDb(sourceKv) + const sourceDb = createDb(sourceKv); - const c_TUndefined_cr = await sourceDb.c_TUndefined.add(TUndefined) - const c_TNull_cr = await sourceDb.c_TNull.add(TNull) - const c_TNaN_cr = await sourceDb.c_TNaN.add(TNaN) - const c_TInfinity_cr = await sourceDb.c_TInfinity.add(TInfinity) - const c_TNumber_cr = await sourceDb.c_TNumber.add(TNumber) - const c_TString_cr = await sourceDb.c_TString.add(TString) - const c_TKvU64_cr = await sourceDb.c_TKvU64.add(TKvU64) - const c_TBoolean_cr = await sourceDb.c_TBoolean.add(TBoolean) - const c_TBigint_cr = await sourceDb.c_TBigint.add(TBigint) - const c_TInt8Array_cr = await sourceDb.c_TInt8Array.add(TInt8Array) - const c_TInt16Array_cr = await sourceDb.c_TInt16Array.add(TInt16Array) - const c_TInt32Array_cr = await sourceDb.c_TInt32Array.add(TInt32Array) + const c_TUndefined_cr = await sourceDb.c_TUndefined.add(TUndefined); + const c_TNull_cr = await sourceDb.c_TNull.add(TNull); + const c_TNaN_cr = await sourceDb.c_TNaN.add(TNaN); + const c_TInfinity_cr = await sourceDb.c_TInfinity.add(TInfinity); + const c_TNumber_cr = await sourceDb.c_TNumber.add(TNumber); + const c_TString_cr = await sourceDb.c_TString.add(TString); + const c_TKvU64_cr = await sourceDb.c_TKvU64.add(TKvU64); + const c_TBoolean_cr = await sourceDb.c_TBoolean.add(TBoolean); + const c_TBigint_cr = await sourceDb.c_TBigint.add(TBigint); + const c_TInt8Array_cr = await sourceDb.c_TInt8Array.add(TInt8Array); + const c_TInt16Array_cr = await sourceDb.c_TInt16Array.add(TInt16Array); + const c_TInt32Array_cr = await sourceDb.c_TInt32Array.add(TInt32Array); const c_TBigInt64Array_cr = await sourceDb.c_TBigInt64Array.add( TBigInt64Array, - ) - const c_TUint8Array_cr = await sourceDb.c_TUint8Array.add(TUint8Array) - const c_TUint16Array_cr = await sourceDb.c_TUint16Array.add(TUint16Array) - const c_TUint32Array_cr = await sourceDb.c_TUint32Array.add(TUint32Array) + ); + const c_TUint8Array_cr = await sourceDb.c_TUint8Array.add(TUint8Array); + const c_TUint16Array_cr = await sourceDb.c_TUint16Array.add(TUint16Array); + const c_TUint32Array_cr = await sourceDb.c_TUint32Array.add(TUint32Array); const c_TBigUint64Array_cr = await sourceDb.c_TBigUint64Array.add( TBigUint64Array, - ) + ); const c_TUint8ClampedArray_cr = await sourceDb.c_TUint8ClampedArray.add( TUint8ClampedArray, - 
) - const c_TFloat32Array_cr = await sourceDb.c_TFloat32Array.add(TFloat32Array) - const c_TFloat64Array_cr = await sourceDb.c_TFloat64Array.add(TFloat64Array) - const c_TBuffer_cr = await sourceDb.c_TBuffer.add(TBuffer) - const c_TDataView_cr = await sourceDb.c_TDataView.add(TDataView) - const c_TDate_cr = await sourceDb.c_TDate.add(TDate) - const c_TRegExp_cr = await sourceDb.c_TRegExp.add(TRegExp) - const c_TError_cr = await sourceDb.c_TError.add(TError) - const c_TArray_cr = await sourceDb.c_TArray.add(TArray) - const c_TObject_cr = await sourceDb.c_TObject.add(TObject) - const c_TSet_cr = await sourceDb.c_TSet.add(TSet) - const c_TMap_cr = await sourceDb.c_TMap.add(TMap) - const s_TUndefined_cr = await sourceDb.s_TUndefined.add(TUndefined) - const s_TNull_cr = await sourceDb.s_TNull.add(TNull) - const s_TNaN_cr = await sourceDb.s_TNaN.add(TNaN) - const s_TInfinity_cr = await sourceDb.s_TInfinity.add(TInfinity) - const s_TNumber_cr = await sourceDb.s_TNumber.add(TNumber) - const s_TString_cr = await sourceDb.s_TString.add(TString) - const s_TKvU64_cr = await sourceDb.s_TKvU64.add(TKvU64) - const s_TBoolean_cr = await sourceDb.s_TBoolean.add(TBoolean) - const s_TBigint_cr = await sourceDb.s_TBigint.add(TBigint) - const s_TInt8Array_cr = await sourceDb.s_TInt8Array.add(TInt8Array) - const s_TInt16Array_cr = await sourceDb.s_TInt16Array.add(TInt16Array) - const s_TInt32Array_cr = await sourceDb.s_TInt32Array.add(TInt32Array) + ); + const c_TFloat32Array_cr = await sourceDb.c_TFloat32Array.add( + TFloat32Array, + ); + const c_TFloat64Array_cr = await sourceDb.c_TFloat64Array.add( + TFloat64Array, + ); + const c_TBuffer_cr = await sourceDb.c_TBuffer.add(TBuffer); + const c_TDataView_cr = await sourceDb.c_TDataView.add(TDataView); + const c_TDate_cr = await sourceDb.c_TDate.add(TDate); + const c_TRegExp_cr = await sourceDb.c_TRegExp.add(TRegExp); + const c_TError_cr = await sourceDb.c_TError.add(TError); + const c_TArray_cr = await sourceDb.c_TArray.add(TArray); + const c_TObject_cr = await sourceDb.c_TObject.add(TObject); + const c_TSet_cr = await sourceDb.c_TSet.add(TSet); + const c_TMap_cr = await sourceDb.c_TMap.add(TMap); + const s_TUndefined_cr = await sourceDb.s_TUndefined.add(TUndefined); + const s_TNull_cr = await sourceDb.s_TNull.add(TNull); + const s_TNaN_cr = await sourceDb.s_TNaN.add(TNaN); + const s_TInfinity_cr = await sourceDb.s_TInfinity.add(TInfinity); + const s_TNumber_cr = await sourceDb.s_TNumber.add(TNumber); + const s_TString_cr = await sourceDb.s_TString.add(TString); + const s_TKvU64_cr = await sourceDb.s_TKvU64.add(TKvU64); + const s_TBoolean_cr = await sourceDb.s_TBoolean.add(TBoolean); + const s_TBigint_cr = await sourceDb.s_TBigint.add(TBigint); + const s_TInt8Array_cr = await sourceDb.s_TInt8Array.add(TInt8Array); + const s_TInt16Array_cr = await sourceDb.s_TInt16Array.add(TInt16Array); + const s_TInt32Array_cr = await sourceDb.s_TInt32Array.add(TInt32Array); const s_TBigInt64Array_cr = await sourceDb.s_TBigInt64Array.add( TBigInt64Array, - ) - const s_TUint8Array_cr = await sourceDb.s_TUint8Array.add(TUint8Array) - const s_TUint16Array_cr = await sourceDb.s_TUint16Array.add(TUint16Array) - const s_TUint32Array_cr = await sourceDb.s_TUint32Array.add(TUint32Array) + ); + const s_TUint8Array_cr = await sourceDb.s_TUint8Array.add(TUint8Array); + const s_TUint16Array_cr = await sourceDb.s_TUint16Array.add(TUint16Array); + const s_TUint32Array_cr = await sourceDb.s_TUint32Array.add(TUint32Array); const s_TBigUint64Array_cr = await sourceDb.s_TBigUint64Array.add( 
TBigUint64Array, - ) + ); const s_TUint8ClampedArray_cr = await sourceDb.s_TUint8ClampedArray.add( TUint8ClampedArray, - ) - const s_TFloat32Array_cr = await sourceDb.s_TFloat32Array.add(TFloat32Array) - const s_TFloat64Array_cr = await sourceDb.s_TFloat64Array.add(TFloat64Array) - const s_TBuffer_cr = await sourceDb.s_TBuffer.add(TBuffer) - const s_TDataView_cr = await sourceDb.s_TDataView.add(TDataView) - const s_TDate_cr = await sourceDb.s_TDate.add(TDate) - const s_TRegExp_cr = await sourceDb.s_TRegExp.add(TRegExp) - const s_TError_cr = await sourceDb.s_TError.add(TError) - const s_TArray_cr = await sourceDb.s_TArray.add(TArray) - const s_TObject_cr = await sourceDb.s_TObject.add(TObject) - const s_TSet_cr = await sourceDb.s_TSet.add(TSet) - const s_TMap_cr = await sourceDb.s_TMap.add(TMap) - const i_cr = await sourceDb.i.add(TObject) - const is_cr = await sourceDb.is.add(TObject) + ); + const s_TFloat32Array_cr = await sourceDb.s_TFloat32Array.add( + TFloat32Array, + ); + const s_TFloat64Array_cr = await sourceDb.s_TFloat64Array.add( + TFloat64Array, + ); + const s_TBuffer_cr = await sourceDb.s_TBuffer.add(TBuffer); + const s_TDataView_cr = await sourceDb.s_TDataView.add(TDataView); + const s_TDate_cr = await sourceDb.s_TDate.add(TDate); + const s_TRegExp_cr = await sourceDb.s_TRegExp.add(TRegExp); + const s_TError_cr = await sourceDb.s_TError.add(TError); + const s_TArray_cr = await sourceDb.s_TArray.add(TArray); + const s_TObject_cr = await sourceDb.s_TObject.add(TObject); + const s_TSet_cr = await sourceDb.s_TSet.add(TSet); + const s_TMap_cr = await sourceDb.s_TMap.add(TMap); + const i_cr = await sourceDb.i.add(TObject); + const is_cr = await sourceDb.is.add(TObject); - assert(c_TUndefined_cr.ok) - assert(c_TNull_cr.ok) - assert(c_TNaN_cr.ok) - assert(c_TInfinity_cr.ok) - assert(c_TString_cr.ok) - assert(c_TNumber_cr.ok) - assert(c_TBigint_cr.ok) - assert(c_TBoolean_cr.ok) - assert(c_TKvU64_cr.ok) - assert(c_TUint8Array_cr.ok) - assert(c_TUint16Array_cr.ok) - assert(c_TUint32Array_cr.ok) - assert(c_TBigUint64Array_cr.ok) - assert(c_TUint8ClampedArray_cr.ok) - assert(c_TInt8Array_cr.ok) - assert(c_TInt16Array_cr.ok) - assert(c_TInt32Array_cr.ok) - assert(c_TBigInt64Array_cr.ok) - assert(c_TBuffer_cr.ok) - assert(c_TDataView_cr.ok) - assert(c_TDate_cr.ok) - assert(c_TError_cr.ok) - assert(c_TRegExp_cr.ok) - assert(c_TFloat32Array_cr.ok) - assert(c_TFloat64Array_cr.ok) - assert(c_TArray_cr.ok) - assert(c_TObject_cr.ok) - assert(c_TSet_cr.ok) - assert(c_TMap_cr.ok) - assert(s_TUndefined_cr.ok) - assert(s_TNull_cr.ok) - assert(s_TNaN_cr.ok) - assert(s_TInfinity_cr.ok) - assert(s_TString_cr.ok) - assert(s_TNumber_cr.ok) - assert(s_TBigint_cr.ok) - assert(s_TBoolean_cr.ok) - assert(s_TKvU64_cr.ok) - assert(s_TUint8Array_cr.ok) - assert(s_TUint16Array_cr.ok) - assert(s_TUint32Array_cr.ok) - assert(s_TBigUint64Array_cr.ok) - assert(s_TUint8ClampedArray_cr.ok) - assert(s_TInt8Array_cr.ok) - assert(s_TInt16Array_cr.ok) - assert(s_TInt32Array_cr.ok) - assert(s_TBigInt64Array_cr.ok) - assert(s_TBuffer_cr.ok) - assert(s_TDataView_cr.ok) - assert(s_TDate_cr.ok) - assert(s_TError_cr.ok) - assert(s_TRegExp_cr.ok) - assert(s_TFloat32Array_cr.ok) - assert(s_TFloat64Array_cr.ok) - assert(s_TArray_cr.ok) - assert(s_TObject_cr.ok) - assert(s_TSet_cr.ok) - assert(s_TMap_cr.ok) - assert(i_cr.ok) - assert(is_cr.ok) + assert(c_TUndefined_cr.ok); + assert(c_TNull_cr.ok); + assert(c_TNaN_cr.ok); + assert(c_TInfinity_cr.ok); + assert(c_TString_cr.ok); + assert(c_TNumber_cr.ok); + assert(c_TBigint_cr.ok); + 
assert(c_TBoolean_cr.ok); + assert(c_TKvU64_cr.ok); + assert(c_TUint8Array_cr.ok); + assert(c_TUint16Array_cr.ok); + assert(c_TUint32Array_cr.ok); + assert(c_TBigUint64Array_cr.ok); + assert(c_TUint8ClampedArray_cr.ok); + assert(c_TInt8Array_cr.ok); + assert(c_TInt16Array_cr.ok); + assert(c_TInt32Array_cr.ok); + assert(c_TBigInt64Array_cr.ok); + assert(c_TBuffer_cr.ok); + assert(c_TDataView_cr.ok); + assert(c_TDate_cr.ok); + assert(c_TError_cr.ok); + assert(c_TRegExp_cr.ok); + assert(c_TFloat32Array_cr.ok); + assert(c_TFloat64Array_cr.ok); + assert(c_TArray_cr.ok); + assert(c_TObject_cr.ok); + assert(c_TSet_cr.ok); + assert(c_TMap_cr.ok); + assert(s_TUndefined_cr.ok); + assert(s_TNull_cr.ok); + assert(s_TNaN_cr.ok); + assert(s_TInfinity_cr.ok); + assert(s_TString_cr.ok); + assert(s_TNumber_cr.ok); + assert(s_TBigint_cr.ok); + assert(s_TBoolean_cr.ok); + assert(s_TKvU64_cr.ok); + assert(s_TUint8Array_cr.ok); + assert(s_TUint16Array_cr.ok); + assert(s_TUint32Array_cr.ok); + assert(s_TBigUint64Array_cr.ok); + assert(s_TUint8ClampedArray_cr.ok); + assert(s_TInt8Array_cr.ok); + assert(s_TInt16Array_cr.ok); + assert(s_TInt32Array_cr.ok); + assert(s_TBigInt64Array_cr.ok); + assert(s_TBuffer_cr.ok); + assert(s_TDataView_cr.ok); + assert(s_TDate_cr.ok); + assert(s_TError_cr.ok); + assert(s_TRegExp_cr.ok); + assert(s_TFloat32Array_cr.ok); + assert(s_TFloat64Array_cr.ok); + assert(s_TArray_cr.ok); + assert(s_TObject_cr.ok); + assert(s_TSet_cr.ok); + assert(s_TMap_cr.ok); + assert(i_cr.ok); + assert(is_cr.ok); await migrate({ source: sourceKv, target: targetKv, all: true, - }) + }); - const targetDb = createDb(targetKv) + const targetDb = createDb(targetKv); const c_TUndefined_doc = await targetDb.c_TUndefined.find( c_TUndefined_cr.id, - ) - const c_TNull_doc = await targetDb.c_TNull.find(c_TNull_cr.id) - const c_TNaN_doc = await targetDb.c_TNaN.find(c_TNaN_cr.id) - const c_TInfinity_doc = await targetDb.c_TInfinity.find(c_TInfinity_cr.id) - const c_TString_doc = await targetDb.c_TString.find(c_TString_cr.id) - const c_TNumber_doc = await targetDb.c_TNumber.find(c_TNumber_cr.id) - const c_TBigint_doc = await targetDb.c_TBigint.find(c_TBigint_cr.id) - const c_TKvU64_doc = await targetDb.c_TKvU64.find(c_TKvU64_cr.id) - const c_TBoolean_doc = await targetDb.c_TBoolean.find(c_TBoolean_cr.id) + ); + const c_TNull_doc = await targetDb.c_TNull.find(c_TNull_cr.id); + const c_TNaN_doc = await targetDb.c_TNaN.find(c_TNaN_cr.id); + const c_TInfinity_doc = await targetDb.c_TInfinity.find(c_TInfinity_cr.id); + const c_TString_doc = await targetDb.c_TString.find(c_TString_cr.id); + const c_TNumber_doc = await targetDb.c_TNumber.find(c_TNumber_cr.id); + const c_TBigint_doc = await targetDb.c_TBigint.find(c_TBigint_cr.id); + const c_TKvU64_doc = await targetDb.c_TKvU64.find(c_TKvU64_cr.id); + const c_TBoolean_doc = await targetDb.c_TBoolean.find(c_TBoolean_cr.id); const c_TInt8Array_doc = await targetDb.c_TInt8Array.find( c_TInt8Array_cr.id, - ) + ); const c_TInt16Array_doc = await targetDb.c_TInt16Array.find( c_TInt16Array_cr.id, - ) + ); const c_TInt32Array_doc = await targetDb.c_TInt32Array.find( c_TInt32Array_cr.id, - ) + ); const c_TBigInt64Array_doc = await targetDb.c_TBigInt64Array.find( c_TBigInt64Array_cr.id, - ) + ); const c_TUint8Array_doc = await targetDb.c_TUint8Array.find( c_TUint8Array_cr.id, - ) + ); const c_TUint16Array_doc = await targetDb.c_TUint16Array.find( c_TUint16Array_cr.id, - ) + ); const c_TUint32Array_doc = await targetDb.c_TUint32Array.find( c_TUint32Array_cr.id, - ) + ); const 
c_TBigUint64Array_doc = await targetDb.c_TBigUint64Array.find( c_TBigUint64Array_cr.id, - ) + ); const c_TUint8ClampedArray_doc = await targetDb.c_TUint8ClampedArray.find( c_TUint8ClampedArray_cr.id, - ) + ); const c_TFloat32Array_doc = await targetDb.c_TFloat32Array.find( c_TFloat32Array_cr.id, - ) + ); const c_TFloat64Array_doc = await targetDb.c_TFloat64Array.find( c_TFloat64Array_cr.id, - ) - const c_TBuffer_doc = await targetDb.c_TBuffer.find(c_TBuffer_cr.id) - const c_TDataView_doc = await targetDb.c_TDataView.find(c_TDataView_cr.id) - const c_TDate_doc = await targetDb.c_TDate.find(c_TDate_cr.id) - const c_TError_doc = await targetDb.c_TError.find(c_TError_cr.id) - const c_TRegExp_doc = await targetDb.c_TRegExp.find(c_TRegExp_cr.id) - const c_TArray_doc = await targetDb.c_TArray.find(c_TArray_cr.id) - const c_TObject_doc = await targetDb.c_TObject.find(c_TObject_cr.id) - const c_TSet_doc = await targetDb.c_TSet.find(c_TSet_cr.id) - const c_TMap_doc = await targetDb.c_TMap.find(c_TMap_cr.id) + ); + const c_TBuffer_doc = await targetDb.c_TBuffer.find(c_TBuffer_cr.id); + const c_TDataView_doc = await targetDb.c_TDataView.find(c_TDataView_cr.id); + const c_TDate_doc = await targetDb.c_TDate.find(c_TDate_cr.id); + const c_TError_doc = await targetDb.c_TError.find(c_TError_cr.id); + const c_TRegExp_doc = await targetDb.c_TRegExp.find(c_TRegExp_cr.id); + const c_TArray_doc = await targetDb.c_TArray.find(c_TArray_cr.id); + const c_TObject_doc = await targetDb.c_TObject.find(c_TObject_cr.id); + const c_TSet_doc = await targetDb.c_TSet.find(c_TSet_cr.id); + const c_TMap_doc = await targetDb.c_TMap.find(c_TMap_cr.id); const s_TUndefined_doc = await targetDb.s_TUndefined.find( s_TUndefined_cr.id, - ) - const s_TNull_doc = await targetDb.s_TNull.find(s_TNull_cr.id) - const s_TNaN_doc = await targetDb.s_TNaN.find(s_TNaN_cr.id) - const s_TInfinity_doc = await targetDb.s_TInfinity.find(s_TInfinity_cr.id) - const s_TString_doc = await targetDb.s_TString.find(s_TString_cr.id) - const s_TNumber_doc = await targetDb.s_TNumber.find(s_TNumber_cr.id) - const s_TBigint_doc = await targetDb.s_TBigint.find(s_TBigint_cr.id) - const s_TKvU64_doc = await targetDb.s_TKvU64.find(s_TKvU64_cr.id) - const s_TBoolean_doc = await targetDb.s_TBoolean.find(s_TBoolean_cr.id) + ); + const s_TNull_doc = await targetDb.s_TNull.find(s_TNull_cr.id); + const s_TNaN_doc = await targetDb.s_TNaN.find(s_TNaN_cr.id); + const s_TInfinity_doc = await targetDb.s_TInfinity.find(s_TInfinity_cr.id); + const s_TString_doc = await targetDb.s_TString.find(s_TString_cr.id); + const s_TNumber_doc = await targetDb.s_TNumber.find(s_TNumber_cr.id); + const s_TBigint_doc = await targetDb.s_TBigint.find(s_TBigint_cr.id); + const s_TKvU64_doc = await targetDb.s_TKvU64.find(s_TKvU64_cr.id); + const s_TBoolean_doc = await targetDb.s_TBoolean.find(s_TBoolean_cr.id); const s_TInt8Array_doc = await targetDb.s_TInt8Array.find( s_TInt8Array_cr.id, - ) + ); const s_TInt16Array_doc = await targetDb.s_TInt16Array.find( s_TInt16Array_cr.id, - ) + ); const s_TInt32Array_doc = await targetDb.s_TInt32Array.find( s_TInt32Array_cr.id, - ) + ); const s_TBigInt64Array_doc = await targetDb.s_TBigInt64Array.find( s_TBigInt64Array_cr.id, - ) + ); const s_TUint8Array_doc = await targetDb.s_TUint8Array.find( s_TUint8Array_cr.id, - ) + ); const s_TUint16Array_doc = await targetDb.s_TUint16Array.find( s_TUint16Array_cr.id, - ) + ); const s_TUint32Array_doc = await targetDb.s_TUint32Array.find( s_TUint32Array_cr.id, - ) + ); const s_TBigUint64Array_doc = await 
targetDb.s_TBigUint64Array.find( s_TBigUint64Array_cr.id, - ) + ); const s_TUint8ClampedArray_doc = await targetDb.s_TUint8ClampedArray.find( s_TUint8ClampedArray_cr.id, - ) + ); const s_TFloat32Array_doc = await targetDb.s_TFloat32Array.find( s_TFloat32Array_cr.id, - ) + ); const s_TFloat64Array_doc = await targetDb.s_TFloat64Array.find( s_TFloat64Array_cr.id, - ) - const s_TBuffer_doc = await targetDb.s_TBuffer.find(s_TBuffer_cr.id) - const s_TDataView_doc = await targetDb.s_TDataView.find(s_TDataView_cr.id) - const s_TDate_doc = await targetDb.s_TDate.find(s_TDate_cr.id) - const s_TError_doc = await targetDb.s_TError.find(s_TError_cr.id) - const s_TRegExp_doc = await targetDb.s_TRegExp.find(s_TRegExp_cr.id) - const s_TArray_doc = await targetDb.s_TArray.find(s_TArray_cr.id) - const s_TObject_doc = await targetDb.s_TObject.find(s_TObject_cr.id) - const s_TSet_doc = await targetDb.s_TSet.find(s_TSet_cr.id) - const s_TMap_doc = await targetDb.s_TMap.find(s_TMap_cr.id) - const i_doc = await targetDb.i.find(i_cr.id) - const is_doc = await targetDb.is.find(is_cr.id) - const check = await targetKv.get(["check"]) + ); + const s_TBuffer_doc = await targetDb.s_TBuffer.find(s_TBuffer_cr.id); + const s_TDataView_doc = await targetDb.s_TDataView.find(s_TDataView_cr.id); + const s_TDate_doc = await targetDb.s_TDate.find(s_TDate_cr.id); + const s_TError_doc = await targetDb.s_TError.find(s_TError_cr.id); + const s_TRegExp_doc = await targetDb.s_TRegExp.find(s_TRegExp_cr.id); + const s_TArray_doc = await targetDb.s_TArray.find(s_TArray_cr.id); + const s_TObject_doc = await targetDb.s_TObject.find(s_TObject_cr.id); + const s_TSet_doc = await targetDb.s_TSet.find(s_TSet_cr.id); + const s_TMap_doc = await targetDb.s_TMap.find(s_TMap_cr.id); + const i_doc = await targetDb.i.find(i_cr.id); + const is_doc = await targetDb.is.find(is_cr.id); + const check = await targetKv.get(["check"]); - assertEquals(c_TUndefined_doc?.value, TUndefined) - assertEquals(c_TNull_doc?.value, TNull) - assertEquals(c_TNaN_doc?.value, TNaN) - assertEquals(c_TInfinity_doc?.value, TInfinity) - assertEquals(c_TString_doc?.value, TString) - assertEquals(c_TNumber_doc?.value, TNumber) - assertEquals(c_TBigint_doc?.value, TBigint) - assertEquals(c_TBoolean_doc?.value, TBoolean) - assertEquals(c_TKvU64_doc?.value, TKvU64) - assertEquals(c_TInt8Array_doc?.value, TInt8Array) - assertEquals(c_TInt16Array_doc?.value, TInt16Array) - assertEquals(c_TInt32Array_doc?.value, TInt32Array) - assertEquals(c_TBigInt64Array_doc?.value, TBigInt64Array) - assertEquals(c_TUint8Array_doc?.value, TUint8Array) - assertEquals(c_TUint16Array_doc?.value, TUint16Array) - assertEquals(c_TUint32Array_doc?.value, TUint32Array) - assertEquals(c_TBigUint64Array_doc?.value, TBigUint64Array) - assertEquals(c_TUint8ClampedArray_doc?.value, TUint8ClampedArray) - assertEquals(c_TFloat32Array_doc?.value, TFloat32Array) - assertEquals(c_TFloat64Array_doc?.value, TFloat64Array) - assertEquals(c_TBuffer_doc?.value, TBuffer) - assertEquals(c_TDataView_doc?.value, TDataView) - assertEquals(c_TDate_doc?.value, TDate) - assertEquals(c_TError_doc?.value, TError) - assertEquals(c_TRegExp_doc?.value, TRegExp) - assertEquals(c_TArray_doc?.value, TArray) - assertEquals(c_TObject_doc?.value, TObject) - assertEquals(c_TSet_doc?.value, TSet) - assertEquals(c_TMap_doc?.value, TMap) - assertEquals(s_TUndefined_doc?.value, TUndefined) - assertEquals(s_TNull_doc?.value, TNull) - assertEquals(s_TNaN_doc?.value, TNaN) - assertEquals(s_TInfinity_doc?.value, TInfinity) - 
assertEquals(s_TString_doc?.value, TString) - assertEquals(s_TNumber_doc?.value, TNumber) - assertEquals(s_TBigint_doc?.value, TBigint) - assertEquals(s_TBoolean_doc?.value, TBoolean) - assertEquals(s_TKvU64_doc?.value, TKvU64) - assertEquals(s_TInt8Array_doc?.value, TInt8Array) - assertEquals(s_TInt16Array_doc?.value, TInt16Array) - assertEquals(s_TInt32Array_doc?.value, TInt32Array) - assertEquals(s_TBigInt64Array_doc?.value, TBigInt64Array) - assertEquals(s_TUint8Array_doc?.value, TUint8Array) - assertEquals(s_TUint16Array_doc?.value, TUint16Array) - assertEquals(s_TUint32Array_doc?.value, TUint32Array) - assertEquals(s_TBigUint64Array_doc?.value, TBigUint64Array) - assertEquals(s_TUint8ClampedArray_doc?.value, TUint8ClampedArray) - assertEquals(s_TFloat32Array_doc?.value, TFloat32Array) - assertEquals(s_TFloat64Array_doc?.value, TFloat64Array) - assertEquals(s_TBuffer_doc?.value, TBuffer) - assertEquals(s_TDataView_doc?.value, TDataView) - assertEquals(s_TDate_doc?.value, TDate) - assertEquals(s_TError_doc?.value, TError) - assertEquals(s_TRegExp_doc?.value, TRegExp) - assertEquals(s_TArray_doc?.value, TArray) - assertEquals(s_TObject_doc?.value, TObject) - assertEquals(s_TSet_doc?.value, TSet) - assertEquals(s_TMap_doc?.value, TMap) - assertEquals(i_doc?.value, TObject) - assertEquals(is_doc?.value, TObject) - assert(!!check.versionstamp && !!check.value) + assertEquals(c_TUndefined_doc?.value, TUndefined); + assertEquals(c_TNull_doc?.value, TNull); + assertEquals(c_TNaN_doc?.value, TNaN); + assertEquals(c_TInfinity_doc?.value, TInfinity); + assertEquals(c_TString_doc?.value, TString); + assertEquals(c_TNumber_doc?.value, TNumber); + assertEquals(c_TBigint_doc?.value, TBigint); + assertEquals(c_TBoolean_doc?.value, TBoolean); + assertEquals(c_TKvU64_doc?.value, TKvU64); + assertEquals(c_TInt8Array_doc?.value, TInt8Array); + assertEquals(c_TInt16Array_doc?.value, TInt16Array); + assertEquals(c_TInt32Array_doc?.value, TInt32Array); + assertEquals(c_TBigInt64Array_doc?.value, TBigInt64Array); + assertEquals(c_TUint8Array_doc?.value, TUint8Array); + assertEquals(c_TUint16Array_doc?.value, TUint16Array); + assertEquals(c_TUint32Array_doc?.value, TUint32Array); + assertEquals(c_TBigUint64Array_doc?.value, TBigUint64Array); + assertEquals(c_TUint8ClampedArray_doc?.value, TUint8ClampedArray); + assertEquals(c_TFloat32Array_doc?.value, TFloat32Array); + assertEquals(c_TFloat64Array_doc?.value, TFloat64Array); + assertEquals(c_TBuffer_doc?.value, TBuffer); + assertEquals(c_TDataView_doc?.value, TDataView); + assertEquals(c_TDate_doc?.value, TDate); + assertEquals(c_TError_doc?.value, TError); + assertEquals(c_TRegExp_doc?.value, TRegExp); + assertEquals(c_TArray_doc?.value, TArray); + assertEquals(c_TObject_doc?.value, TObject); + assertEquals(c_TSet_doc?.value, TSet); + assertEquals(c_TMap_doc?.value, TMap); + assertEquals(s_TUndefined_doc?.value, TUndefined); + assertEquals(s_TNull_doc?.value, TNull); + assertEquals(s_TNaN_doc?.value, TNaN); + assertEquals(s_TInfinity_doc?.value, TInfinity); + assertEquals(s_TString_doc?.value, TString); + assertEquals(s_TNumber_doc?.value, TNumber); + assertEquals(s_TBigint_doc?.value, TBigint); + assertEquals(s_TBoolean_doc?.value, TBoolean); + assertEquals(s_TKvU64_doc?.value, TKvU64); + assertEquals(s_TInt8Array_doc?.value, TInt8Array); + assertEquals(s_TInt16Array_doc?.value, TInt16Array); + assertEquals(s_TInt32Array_doc?.value, TInt32Array); + assertEquals(s_TBigInt64Array_doc?.value, TBigInt64Array); + assertEquals(s_TUint8Array_doc?.value, 
TUint8Array); + assertEquals(s_TUint16Array_doc?.value, TUint16Array); + assertEquals(s_TUint32Array_doc?.value, TUint32Array); + assertEquals(s_TBigUint64Array_doc?.value, TBigUint64Array); + assertEquals(s_TUint8ClampedArray_doc?.value, TUint8ClampedArray); + assertEquals(s_TFloat32Array_doc?.value, TFloat32Array); + assertEquals(s_TFloat64Array_doc?.value, TFloat64Array); + assertEquals(s_TBuffer_doc?.value, TBuffer); + assertEquals(s_TDataView_doc?.value, TDataView); + assertEquals(s_TDate_doc?.value, TDate); + assertEquals(s_TError_doc?.value, TError); + assertEquals(s_TRegExp_doc?.value, TRegExp); + assertEquals(s_TArray_doc?.value, TArray); + assertEquals(s_TObject_doc?.value, TObject); + assertEquals(s_TSet_doc?.value, TSet); + assertEquals(s_TMap_doc?.value, TMap); + assertEquals(i_doc?.value, TObject); + assertEquals(is_doc?.value, TObject); + assert(!!check.versionstamp && !!check.value); - sourceKv.close() - targetKv.close() + sourceKv.close(); + targetKv.close(); - await Deno.remove(temp) - }) -}) + await Deno.remove(temp); + }); +}); diff --git a/tests/ext/zod.test.ts b/tests/ext/zod.test.ts index 910e1de..f133691 100644 --- a/tests/ext/zod.test.ts +++ b/tests/ext/zod.test.ts @@ -1,13 +1,13 @@ -import { assert, z } from "../test.deps.ts" +import { assert, z } from "../test.deps.ts"; import { KvArraySchema, KvIdSchema, KvObjectSchema, KvValueSchema, -} from "../../src/ext/zod/mod.ts" -import { collection, kvdex } from "../../mod.ts" -import { useKv } from "../utils.ts" -import { VALUES } from "../values.ts" +} from "../../src/ext/zod/mod.ts"; +import { collection, kvdex } from "../../mod.ts"; +import { useKv } from "../utils.ts"; +import { VALUES } from "../values.ts"; const UserSchema = z.object({ username: z.string(), @@ -19,25 +19,25 @@ const UserSchema = z.object({ .default({ city: "Bergen", }), -}) +}); const kvObject = { string: "test", number: 10, boolean: true, bigint: 10n, -} +}; const notKvObject = { test1: Symbol("test1"), [Symbol("test2")]: "test2", -} +}; -const notKvArray = [Symbol("test")] +const notKvArray = [Symbol("test")]; -const notKvValues = [Symbol(), notKvObject, notKvArray] +const notKvValues = [Symbol(), notKvObject, notKvArray]; -const kvArray = ["test", 10, true, 10n] +const kvArray = ["test", 10, true, 10n]; Deno.test("ext - zod", async (t) => { await t.step("Should correctly parse insert model", async () => { @@ -51,37 +51,37 @@ Deno.test("ext - zod", async (t) => { }, }), s_users: collection(UserSchema, { serialize: "json" }), - }) + }); const cr1 = await db.users.add({ username: "oliver", address: { city: "Bergen", }, - }) + }); const cr2 = await db.i_users.add({ username: "oliver", address: { city: "Bergen", }, - }) + }); const cr3 = await db.s_users.add({ username: "oliver", address: { city: "Bergen", }, - }) + }); - const count = await db.countAll() + const count = await db.countAll(); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) - assert(count === 3) - }) - }) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); + assert(count === 3); + }); + }); await t.step( "Should use base model when typing selected documents", @@ -96,76 +96,76 @@ Deno.test("ext - zod", async (t) => { }, }), l_users: collection(UserSchema), - }) + }); // Default values should not be inferred as optional when selecting - const doc1 = await db.users.find("") + const doc1 = await db.users.find(""); if (doc1) { - doc1.value.age.valueOf() + doc1.value.age.valueOf(); } - const doc2 = await db.users.find("") + const doc2 = await db.users.find(""); if (doc2) { - 
doc2.value.age.valueOf() + doc2.value.age.valueOf(); } - const doc3 = await db.users.find("") + const doc3 = await db.users.find(""); if (doc3) { - doc3.value.age.valueOf() + doc3.value.age.valueOf(); } - }) + }); }, - ) + ); await t.step( "KvIdSchema should only successfully parse values according to KvId", () => { - const string = KvIdSchema.safeParse("") - const number = KvIdSchema.safeParse(0) - const boolean = KvIdSchema.safeParse(true) - const bigint = KvIdSchema.safeParse(0n) - const uint8array = KvIdSchema.safeParse(new Uint8Array()) - const undef = KvIdSchema.safeParse(undefined) - const nul = KvIdSchema.safeParse(null) - const date = KvIdSchema.safeParse(new Date()) - - assert(string.success) - assert(number.success) - assert(boolean.success) - assert(bigint.success) - assert(uint8array.success) - assert(!undef.success) - assert(!nul.success) - assert(!date.success) + const string = KvIdSchema.safeParse(""); + const number = KvIdSchema.safeParse(0); + const boolean = KvIdSchema.safeParse(true); + const bigint = KvIdSchema.safeParse(0n); + const uint8array = KvIdSchema.safeParse(new Uint8Array()); + const undef = KvIdSchema.safeParse(undefined); + const nul = KvIdSchema.safeParse(null); + const date = KvIdSchema.safeParse(new Date()); + + assert(string.success); + assert(number.success); + assert(boolean.success); + assert(bigint.success); + assert(uint8array.success); + assert(!undef.success); + assert(!nul.success); + assert(!date.success); }, - ) + ); await t.step( "KvValueSchema should only successfully parse values according to KvValue", () => { VALUES.forEach((val) => { - assert(KvValueSchema.safeParse(val).success) - }) + assert(KvValueSchema.safeParse(val).success); + }); notKvValues.forEach( (val) => assert(!KvValueSchema.safeParse(val).success), - ) + ); }, - ) + ); await t.step( "KvObjectSchema should only successfully parse values according to KvObject", () => { - assert(KvObjectSchema.safeParse(kvObject).success) - assert(!KvObjectSchema.safeParse(notKvObject).success) + assert(KvObjectSchema.safeParse(kvObject).success); + assert(!KvObjectSchema.safeParse(notKvObject).success); }, - ) + ); await t.step( "KvArraySchema should only successfully parse values according to KvArray", () => { - assert(KvArraySchema.safeParse(kvArray).success) - assert(!KvArraySchema.safeParse(notKvArray).success) + assert(KvArraySchema.safeParse(kvArray).success); + assert(!KvArraySchema.safeParse(notKvArray).success); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/add.test.ts b/tests/indexable_collection/add.test.ts index 170489a..4787674 100644 --- a/tests/indexable_collection/add.test.ts +++ b/tests/indexable_collection/add.test.ts @@ -1,55 +1,55 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - add", async (t) => { await t.step("Should add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); - const doc = await db.i_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.i_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step( "Should 
not add new document with colliding primary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser1) - const count = await db.i_users.count() - assert(cr1.ok) - assert(!cr2.ok) - assert(count === 1) - }) + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser1); + const count = await db.i_users.count(); + assert(cr1.ok); + assert(!cr2.ok); + assert(count === 1); + }); }, - ) + ); await t.step( "Should successfully parse and add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.zi_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zi_users.add(mockUser1); + assert(cr.ok); - const doc = await db.zi_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + const doc = await db.zi_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should fail parse and add new document entry to collection", async () => { await useDb(async (db) => { - let assertion = false - await db.zi_users.add(mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) + let assertion = false; + await db.zi_users.add(mockUserInvalid).catch(() => assertion = true); + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/addMany.test.ts b/tests/indexable_collection/addMany.test.ts index 0f0af42..c1c55cb 100644 --- a/tests/indexable_collection/addMany.test.ts +++ b/tests/indexable_collection/addMany.test.ts @@ -1,72 +1,72 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { generateInvalidUsers, generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { generateInvalidUsers, generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - addMany", async (t) => { await t.step( "Should successfully add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const { result } = await db.i_users.getMany() + const { result } = await db.i_users.getMany(); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should not add documents with colliding primary indices", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany([mockUser1, mockUser1]) - const count = await db.i_users.count() + const cr = await db.i_users.addMany([mockUser1, mockUser1]); + const count = await db.i_users.count(); - assert(!cr.ok) - assert(count === 1) - }) + assert(!cr.ok); + assert(count === 1); + }); }, - ) + ); await t.step( "Should successfully parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.zi_users.addMany(users); + assert(cr.ok); - const { result } = await db.zi_users.getMany() + const { result } = await db.zi_users.getMany(); - assert(result.length === users.length) + 
assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should fail to parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateInvalidUsers(1_000) - let assertion = false + const users = generateInvalidUsers(1_000); + let assertion = false; - await db.zi_users.addMany(users).catch(() => assertion = true) + await db.zi_users.addMany(users).catch(() => assertion = true); - assert(assertion) - }) + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/count.test.ts b/tests/indexable_collection/count.test.ts index 8bd3ba6..6720ff5 100644 --- a/tests/indexable_collection/count.test.ts +++ b/tests/indexable_collection/count.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - count", async (t) => { await t.step( "Should correctly count total number of documents in the collection", async () => { await useDb(async (db) => { - const count1 = await db.i_users.count() - assert(count1 === 0) + const count1 = await db.i_users.count(); + assert(count1 === 0); - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const count2 = await db.i_users.count() - assert(count2 === users.length) - }) + const count2 = await db.i_users.count(); + assert(count2 === users.length); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/countBySecondaryIndex.test.ts b/tests/indexable_collection/countBySecondaryIndex.test.ts index b44a8ee..b89e334 100644 --- a/tests/indexable_collection/countBySecondaryIndex.test.ts +++ b/tests/indexable_collection/countBySecondaryIndex.test.ts @@ -1,6 +1,6 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - countBySecondaryIndex", async (t) => { await t.step( @@ -10,19 +10,19 @@ Deno.test("indexable_collection - countBySecondaryIndex", async (t) => { const count1 = await db.i_users.countBySecondaryIndex( "age", mockUser1.age, - ) - assert(count1 === 0) + ); + assert(count1 === 0); - const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); const count2 = await db.i_users.countBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count2 === 2) - }) + assert(count2 === 2); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/countBySecondaryOrder.test.ts b/tests/indexable_collection/countBySecondaryOrder.test.ts index ca988ed..956916e 100644 --- a/tests/indexable_collection/countBySecondaryOrder.test.ts +++ b/tests/indexable_collection/countBySecondaryOrder.test.ts @@ -1,6 +1,6 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUsersWithAlteredAge } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, 
mockUsersWithAlteredAge } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - countBySecondaryOrder", async (t) => { await t.step( @@ -9,11 +9,11 @@ Deno.test("indexable_collection - countBySecondaryOrder", async (t) => { await useDb(async (db) => { const count1 = await db.i_users.countBySecondaryOrder( "age", - ) - assert(count1 === 0) + ); + assert(count1 === 0); - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const count2 = await db.i_users.countBySecondaryOrder( "age", @@ -21,9 +21,9 @@ Deno.test("indexable_collection - countBySecondaryOrder", async (t) => { limit: 1, filter: (doc) => doc.value.age < mockUser1.age, }, - ) + ); - assert(count2 === 1) + assert(count2 === 1); const count3 = await db.i_users.countBySecondaryOrder( "age", @@ -31,13 +31,13 @@ Deno.test("indexable_collection - countBySecondaryOrder", async (t) => { limit: 2, filter: (doc) => doc.value.age < mockUser2.age, }, - ) + ); - assert(count3 === 2) + assert(count3 === 2); - const count4 = await db.i_users.countBySecondaryOrder("age") - assert(count4 === 3) - }) + const count4 = await db.i_users.countBySecondaryOrder("age"); + assert(count4 === 3); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/delete.test.ts b/tests/indexable_collection/delete.test.ts index 05526ad..779a464 100644 --- a/tests/indexable_collection/delete.test.ts +++ b/tests/indexable_collection/delete.test.ts @@ -1,67 +1,67 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - delete", async (t) => { await t.step( "Should successfully delete a document and its indices from the collection", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - const count1 = await db.i_users.count() + const cr = await db.i_users.add(mockUser1); + const count1 = await db.i_users.count(); const byPrimary1 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary1 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(cr.ok) - assert(count1 === 1) - assert(byPrimary1?.id === cr.id) - assert(bySecondary1.result.at(0)?.id === cr.id) + assert(cr.ok); + assert(count1 === 1); + assert(byPrimary1?.id === cr.id); + assert(bySecondary1.result.at(0)?.id === cr.id); - await db.i_users.delete(cr.id) + await db.i_users.delete(cr.id); - const count2 = await db.i_users.count() - const doc = await db.i_users.find(cr.id) + const count2 = await db.i_users.count(); + const doc = await db.i_users.find(cr.id); const byPrimary2 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary2 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count2 === 0) - assert(doc === null) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(doc === null); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) + ); await t.step( "Should successfully delete 1000 documents from the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - const count1 = 
await db.i_users.count() + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); - assert(cr.ok) - assert(count1 === users.length) + assert(cr.ok); + assert(count1 === users.length); - const { result: ids } = await db.i_users.map((doc) => doc.id) + const { result: ids } = await db.i_users.map((doc) => doc.id); - await db.i_users.delete(...ids) + await db.i_users.delete(...ids); - const count2 = await db.i_users.count() - assert(count2 === 0) - }) + const count2 = await db.i_users.count(); + assert(count2 === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/deleteByPrimaryIndex.test.ts b/tests/indexable_collection/deleteByPrimaryIndex.test.ts index 293523d..dbcdcfe 100644 --- a/tests/indexable_collection/deleteByPrimaryIndex.test.ts +++ b/tests/indexable_collection/deleteByPrimaryIndex.test.ts @@ -1,46 +1,46 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - deleteByPrimaryIndex", async (t) => { await t.step( "Should successfully delete a document and its indices from the collection by primary index", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - const count1 = await db.i_users.count() + const cr = await db.i_users.add(mockUser1); + const count1 = await db.i_users.count(); const byPrimary1 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary1 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(cr.ok) - assert(count1 === 1) - assert(byPrimary1?.id === cr.id) - assert(bySecondary1.result.at(0)?.id === cr.id) + assert(cr.ok); + assert(count1 === 1); + assert(byPrimary1?.id === cr.id); + assert(bySecondary1.result.at(0)?.id === cr.id); - await db.i_users.deleteByPrimaryIndex("username", mockUser1.username) + await db.i_users.deleteByPrimaryIndex("username", mockUser1.username); - const count2 = await db.i_users.count() - const doc = await db.i_users.find(cr.id) + const count2 = await db.i_users.count(); + const doc = await db.i_users.find(cr.id); const byPrimary2 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary2 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count2 === 0) - assert(doc === null) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(doc === null); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/deleteBySecondaryIndex.test.ts b/tests/indexable_collection/deleteBySecondaryIndex.test.ts index d4e1bcb..29df499 100644 --- a/tests/indexable_collection/deleteBySecondaryIndex.test.ts +++ b/tests/indexable_collection/deleteBySecondaryIndex.test.ts @@ -1,48 +1,48 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - deleteBySecondaryIndex", async (t) => { await t.step( "Should delete documents and indices from the collection by secondary index", async () => { await useDb(async (db) => { 
- const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser2) - assert(cr1.ok && cr2.ok) - const count1 = await db.i_users.count() + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser2); + assert(cr1.ok && cr2.ok); + const count1 = await db.i_users.count(); const byPrimary1 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary1 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count1 === 2) - assert(byPrimary1?.value.username === mockUser1.username) - assert(bySecondary1.result.length === 2) + assert(count1 === 2); + assert(byPrimary1?.value.username === mockUser1.username); + assert(bySecondary1.result.length === 2); - await db.i_users.deleteBySecondaryIndex("age", mockUser1.age) + await db.i_users.deleteBySecondaryIndex("age", mockUser1.age); - const count2 = await db.i_users.count() + const count2 = await db.i_users.count(); const byPrimary2 = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary2 = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count2 === 0) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/deleteMany.test.ts b/tests/indexable_collection/deleteMany.test.ts index 6210fd3..cc560d9 100644 --- a/tests/indexable_collection/deleteMany.test.ts +++ b/tests/indexable_collection/deleteMany.test.ts @@ -1,46 +1,46 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - deleteMany", async (t) => { await t.step( "Should delete all documents and indices from the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const user1 = users[0] + const users = generateUsers(1_000); + const user1 = users[0]; - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); const byPrimary1 = await db.i_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary1 = await db.i_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(cr.ok) - assert(count1 === users.length) - assert(byPrimary1?.value.username === user1.username) - assert(bySecondary1.result.length > 0) + assert(cr.ok); + assert(count1 === users.length); + assert(byPrimary1?.value.username === user1.username); + assert(bySecondary1.result.length > 0); - await db.i_users.deleteMany() + await db.i_users.deleteMany(); - const count2 = await db.i_users.count() + const count2 = await db.i_users.count(); const byPrimary2 = await db.i_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary2 = await db.i_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(count2 === 0) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/deleteManyBySecondaryOrder.test.ts b/tests/indexable_collection/deleteManyBySecondaryOrder.test.ts index b844851..3ee058b 100644 --- 
a/tests/indexable_collection/deleteManyBySecondaryOrder.test.ts +++ b/tests/indexable_collection/deleteManyBySecondaryOrder.test.ts @@ -1,28 +1,28 @@ -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser2, mockUsersWithAlteredAge } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser2, mockUsersWithAlteredAge } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - deleteManyBySecondaryOrder", async (t) => { await t.step( "Should delete documents and indices from the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - const count1 = await db.i_users.count() - assert(cr.ok) - assertEquals(count1, mockUsersWithAlteredAge.length) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + const count1 = await db.i_users.count(); + assert(cr.ok); + assertEquals(count1, mockUsersWithAlteredAge.length); await db.i_users.deleteManyBySecondaryOrder("age", { limit: mockUsersWithAlteredAge.length - 1, - }) + }); - const count2 = await db.i_users.count() - const doc = await db.i_users.getOne() + const count2 = await db.i_users.count(); + const doc = await db.i_users.getOne(); - assertEquals(count2, 1) - assertEquals(doc?.value.username, mockUser2.username) - assertEquals(doc?.value.address, mockUser2.address) - }) + assertEquals(count2, 1); + assertEquals(doc?.value.username, mockUser2.username); + assertEquals(doc?.value.address, mockUser2.address); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/enqueue.test.ts b/tests/indexable_collection/enqueue.test.ts index 462cbdf..92b0a5b 100644 --- a/tests/indexable_collection/enqueue.test.ts +++ b/tests/indexable_collection/enqueue.test.ts @@ -4,100 +4,100 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { createResolver, useDb, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { createResolver, useDb, useKv } from "../utils.ts"; Deno.test("indexable_collection - enqueue", async (t) => { await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" + const data = "data"; + const undeliveredId = "undelivered"; const db = kvdex(kv, { i_users: collection(model(), { indices: {} }), - }) + }); - const sleeper = createResolver() - const handlerId = createHandlerId(db.i_users._keys.base, undefined) - let assertion = false + const sleeper = createResolver(); + const handlerId = createHandlerId(db.i_users._keys.base, undefined); + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - sleeper.resolve() - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + sleeper.resolve(); + }); await db.i_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.i_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof 
data) + const undelivered = await db.i_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should enqueue message in correct topic", async () => { await useDb(async (db) => { - const data = "data" - const undeliveredId = "undelivered" - const topic = "topic" + const data = "data"; + const undeliveredId = "undelivered"; + const topic = "topic"; - const sleeper = createResolver() - let assertion1 = false - let assertion2 = true + const sleeper = createResolver(); + let assertion1 = false; + let assertion2 = true; const l1 = db.i_users.listenQueue(() => { - assertion1 = true - sleeper.resolve() - }, { topic }) + assertion1 = true; + sleeper.resolve(); + }, { topic }); - const l2 = db.i_users.listenQueue(() => assertion2 = false) + const l2 = db.i_users.listenQueue(() => assertion2 = false); await db.i_users.enqueue("data", { idsIfUndelivered: [undeliveredId], topic, - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.i_users.findUndelivered(undeliveredId) - assert(assertion1 || typeof undelivered?.value === typeof data) - assert(assertion2) + const undelivered = await db.i_users.findUndelivered(undeliveredId); + assert(assertion1 || typeof undelivered?.value === typeof data); + assert(assertion2); - return async () => await Promise.all([l1, l2]) - }) - }) + return async () => await Promise.all([l1, l2]); + }); + }); await t.step("Should enqueue message with undefined data", async () => { await useDb(async (db) => { - const data = undefined - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = undefined; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); - let assertion = false + let assertion = false; const listener = db.i_users.listenQueue((msg) => { - assertion = msg === data - sleeper.resolve() - }) + assertion = msg === data; + sleeper.resolve(); + }); await db.i_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.i_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.i_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/indexable_collection/find.test.ts b/tests/indexable_collection/find.test.ts index daf9ab0..b7bc916 100644 --- a/tests/indexable_collection/find.test.ts +++ b/tests/indexable_collection/find.test.ts @@ -1,25 +1,25 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - find", async (t) => { await t.step("Should find document by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.i_users.set(id, mockUser1) - assert(cr.ok) + const cr = await db.i_users.set(id, mockUser1); + assert(cr.ok); - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username 
=== mockUser1.username); + }); + }); await t.step("Should not find document by non-existing id", async () => { await useDb(async (db) => { - const doc = await db.i_users.find("123") - assert(doc === null) - }) - }) -}) + const doc = await db.i_users.find("123"); + assert(doc === null); + }); + }); +}); diff --git a/tests/indexable_collection/findByPrimaryIndex.test.ts b/tests/indexable_collection/findByPrimaryIndex.test.ts index 3a92605..82c0146 100644 --- a/tests/indexable_collection/findByPrimaryIndex.test.ts +++ b/tests/indexable_collection/findByPrimaryIndex.test.ts @@ -1,50 +1,50 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { TransformUserModel } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { TransformUserModel } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - findByPrimaryIndex", async (t) => { await t.step("Should find document by primary index", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.i_users.set(id, mockUser1) - assert(cr.ok) + const cr = await db.i_users.set(id, mockUser1); + assert(cr.ok); const doc = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + ); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step("Should not find document by non-existing index", async () => { await useDb(async (db) => { const doc = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) - assert(doc === null) - }) - }) + ); + assert(doc === null); + }); + }); await t.step( "Should find document by asymmetric model primary index", async () => { await useDb(async (db) => { - const transformed = TransformUserModel.parse(mockUser1) + const transformed = TransformUserModel.parse(mockUser1); - const cr = await db.ai_users.add(mockUser1) - assert(cr.ok) + const cr = await db.ai_users.add(mockUser1); + assert(cr.ok); const doc = await db.ai_users.findByPrimaryIndex( "name", transformed.name, - ) - assert(doc?.value.name === transformed.name) - }) + ); + assert(doc?.value.name === transformed.name); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/findBySecondaryIndex.test.ts b/tests/indexable_collection/findBySecondaryIndex.test.ts index e8c9ccd..e6708ef 100644 --- a/tests/indexable_collection/findBySecondaryIndex.test.ts +++ b/tests/indexable_collection/findBySecondaryIndex.test.ts @@ -1,61 +1,61 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" -import { TransformUserModel } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; +import { TransformUserModel } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - findBySecondaryIndex", async (t) => { await t.step("Should find documents by secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser2) - assert(cr1.ok && cr2.ok) + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser2); + assert(cr1.ok && cr2.ok); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - 
assert(bySecondary.result.length === 2) - assert(bySecondary.result.some((doc) => doc.id === cr1.id)) - assert(bySecondary.result.some((doc) => doc.id === cr2.id)) - }) - }) + assert(bySecondary.result.length === 2); + assert(bySecondary.result.some((doc) => doc.id === cr1.id)); + assert(bySecondary.result.some((doc) => doc.id === cr2.id)); + }); + }); await t.step( "Should not find documents by non-existing secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser2) - assert(cr1.ok && cr2.ok) + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser2); + assert(cr1.ok && cr2.ok); const bySecondary = await db.i_users.findBySecondaryIndex( "age", -1, - ) + ); - assert(bySecondary.result.length === 0) - }) + assert(bySecondary.result.length === 0); + }); }, - ) + ); await t.step( "Should find documents by asymmetric model secondary index", async () => { await useDb(async (db) => { - const t1 = TransformUserModel.parse(mockUser1) - const t2 = TransformUserModel.parse(mockUser2) + const t1 = TransformUserModel.parse(mockUser1); + const t2 = TransformUserModel.parse(mockUser2); - const cr = await db.ai_users.addMany([mockUser1, mockUser2]) - assert(cr.ok) + const cr = await db.ai_users.addMany([mockUser1, mockUser2]); + assert(cr.ok); const { result } = await db.ai_users.findBySecondaryIndex( "decadeAge", t1.decadeAge, - ) - result.some((doc) => doc.value.name === t1.name) - result.some((doc) => doc.value.name === t2.name) - }) + ); + result.some((doc) => doc.value.name === t1.name); + result.some((doc) => doc.value.name === t2.name); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/findMany.test.ts b/tests/indexable_collection/findMany.test.ts index 9dc672e..b536cb4 100644 --- a/tests/indexable_collection/findMany.test.ts +++ b/tests/indexable_collection/findMany.test.ts @@ -1,32 +1,32 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - findMany", async (t) => { await t.step("Should find all documents", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const { result: docs } = await db.i_users.getMany() + const { result: docs } = await db.i_users.getMany(); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should not find any documents", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.findMany(["", "", ""]) - assert(docs.length === 0) - }) - }) -}) + const docs = await db.i_users.findMany(["", "", ""]); + assert(docs.length === 0); + }); + }); +}); diff --git a/tests/indexable_collection/forEach.test.ts b/tests/indexable_collection/forEach.test.ts index 70361fb..8da46d3 100644 --- a/tests/indexable_collection/forEach.test.ts +++ b/tests/indexable_collection/forEach.test.ts @@ -1,27 +1,27 @@ -import type { Document } 
from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateUsers, useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - forEach", async (t) => { await t.step( "Should run callback function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs: Document[] = [] - await db.i_users.forEach((doc) => docs.push(doc)) + const docs: Document[] = []; + await db.i_users.forEach((doc) => docs.push(doc)); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/forEachBySecondaryIndex.test.ts b/tests/indexable_collection/forEachBySecondaryIndex.test.ts index a71c598..07ed07d 100644 --- a/tests/indexable_collection/forEachBySecondaryIndex.test.ts +++ b/tests/indexable_collection/forEachBySecondaryIndex.test.ts @@ -1,29 +1,29 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - forEachBySecondaryIndex", async (t) => { await t.step( "Should run callback function for each document in the collection by secondary index", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); - const docs: Document[] = [] + const docs: Document[] = []; await db.i_users.forEachBySecondaryIndex( "age", mockUser1.age, (doc) => docs.push(doc), - ) + ); - assert(docs.length === 2) - assert(docs.some((doc) => doc.value.username === mockUser1.username)) - assert(docs.some((doc) => doc.value.username === mockUser2.username)) - assert(!docs.some((doc) => doc.value.username === mockUser3.username)) - }) + assert(docs.length === 2); + assert(docs.some((doc) => doc.value.username === mockUser1.username)); + assert(docs.some((doc) => doc.value.username === mockUser2.username)); + assert(!docs.some((doc) => doc.value.username === mockUser3.username)); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/forEachBySecondaryOrder.test.ts b/tests/indexable_collection/forEachBySecondaryOrder.test.ts index e6ada9b..5b43cca 100644 --- a/tests/indexable_collection/forEachBySecondaryOrder.test.ts +++ b/tests/indexable_collection/forEachBySecondaryOrder.test.ts @@ -1,32 +1,32 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import type { 
User } from "../models.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - forEachBySecondaryOrder", async (t) => { await t.step( "Should run callback function for each document in the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs: Document[] = [] + const docs: Document[] = []; await db.i_users.forEachBySecondaryOrder( "age", (doc) => docs.push(doc), - ) + ); - assert(docs[0].value.username === mockUser3.username) - assert(docs[1].value.username === mockUser1.username) - assert(docs[2].value.username === mockUser2.username) - }) + assert(docs[0].value.username === mockUser3.username); + assert(docs[1].value.username === mockUser1.username); + assert(docs[2].value.username === mockUser2.username); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/getMany.test.ts b/tests/indexable_collection/getMany.test.ts index 26c1ca0..0235379 100644 --- a/tests/indexable_collection/getMany.test.ts +++ b/tests/indexable_collection/getMany.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" -import { generateUsers } from "../utils.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers } from "../utils.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - getMany", async (t) => { await t.step("Should get all documents", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const { result } = await db.i_users.getMany() - assert(result.length === users.length) + const { result } = await db.i_users.getMany(); + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) - }) -}) + ); + }); + }); +}); diff --git a/tests/indexable_collection/getManyBySecondaryOrder.test.ts b/tests/indexable_collection/getManyBySecondaryOrder.test.ts index 37042f4..e9963d5 100644 --- a/tests/indexable_collection/getManyBySecondaryOrder.test.ts +++ b/tests/indexable_collection/getManyBySecondaryOrder.test.ts @@ -3,21 +3,21 @@ import { mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { assert } from "../test.deps.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import { assert } from "../test.deps.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - getManyBySecondaryOrder", async (t) => { await t.step("Should get all documents by secondary order", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const { result } = await db.i_users.getManyBySecondaryOrder("age") - assert(result.length === mockUsersWithAlteredAge.length) - assert(result[0].value.username === mockUser3.username) - assert(result[1].value.username === mockUser1.username) - assert(result[2].value.username === mockUser2.username) - }) - }) -}) + const { result } = await db.i_users.getManyBySecondaryOrder("age"); + assert(result.length === 
mockUsersWithAlteredAge.length); + assert(result[0].value.username === mockUser3.username); + assert(result[1].value.username === mockUser1.username); + assert(result[2].value.username === mockUser2.username); + }); + }); +}); diff --git a/tests/indexable_collection/getOne.test.ts b/tests/indexable_collection/getOne.test.ts index 650f433..69dfc50 100644 --- a/tests/indexable_collection/getOne.test.ts +++ b/tests/indexable_collection/getOne.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("indexable_collection - getOne", async (t) => { await t.step("Should get only one document", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const doc = await db.i_users.getOne() - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) -}) + const doc = await db.i_users.getOne(); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); +}); diff --git a/tests/indexable_collection/getOneBySecondaryIndex.test.ts b/tests/indexable_collection/getOneBySecondaryIndex.test.ts index a78fcf1..113e63d 100644 --- a/tests/indexable_collection/getOneBySecondaryIndex.test.ts +++ b/tests/indexable_collection/getOneBySecondaryIndex.test.ts @@ -1,26 +1,26 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("indexable_collection - getOneBySecondaryIndex", async (t) => { await t.step( "Should get only one document by a secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const doc = await db.i_users.getOneBySecondaryIndex( "age", mockUser2.age, - ) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + ); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/getOneBySecondaryOrder.test.ts b/tests/indexable_collection/getOneBySecondaryOrder.test.ts index eaba000..4802cdb 100644 --- a/tests/indexable_collection/getOneBySecondaryOrder.test.ts +++ b/tests/indexable_collection/getOneBySecondaryOrder.test.ts @@ -1,17 +1,17 @@ -import { assert } from "../test.deps.ts" -import { useDb } from "../utils.ts" -import { mockUser3, mockUsersWithAlteredAge } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { useDb } from "../utils.ts"; +import { mockUser3, mockUsersWithAlteredAge } from "../mocks.ts"; Deno.test("indexable_collection - getOneBySecondaryOrder", async (t) => { await t.step("Should get only one document by secondary order", async () => { await useDb(async 
(db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); - assert(cr.ok) + assert(cr.ok); - const doc = await db.i_users.getOneBySecondaryOrder("age") - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - }) - }) -}) + const doc = await db.i_users.getOneBySecondaryOrder("age"); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + }); + }); +}); diff --git a/tests/indexable_collection/history.test.ts b/tests/indexable_collection/history.test.ts index b0df01a..f7fcf65 100644 --- a/tests/indexable_collection/history.test.ts +++ b/tests/indexable_collection/history.test.ts @@ -1,8 +1,8 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { generateUsers, sleep, useKv } from "../utils.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { generateUsers, sleep, useKv } from "../utils.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; Deno.test("indexable_collection - history", async (t) => { await t.step( @@ -17,27 +17,27 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts in correct order after deleting", @@ -51,35 +51,35 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - await sleep(10) - await db.users.delete(id) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, 
mockUser3, { overwrite: true }); + await sleep(10); + await db.users.delete(id); - const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "write") - assert(h4.value.username === mockUser3.username) - assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()) - assert(h5.type === "delete") - }) + const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "write"); + assert(h4.value.username === mockUser3.username); + assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()); + assert(h5.type === "delete"); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts and updates in correct order", @@ -93,27 +93,27 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.update(id, mockUser2) - await sleep(10) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.update(id, mockUser2); + await sleep(10); + await db.users.update(id, mockUser3); - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist version history of insert and delete by deleteMany()", @@ -127,30 +127,30 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.deleteMany() - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.deleteMany({ filter: () => true }) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.deleteMany(); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.deleteMany({ filter: () 
=> true }); - const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "delete") - }) + const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "delete"); + }); }, - ) + ); await t.step( "Should not find history", @@ -163,19 +163,19 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.update(id, mockUser2) - await db.users.delete(id) - await db.users.deleteMany() + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.update(id, mockUser2); + await db.users.delete(id); + await db.users.deleteMany(); - const { result: history } = await db.users.findHistory(id) - assert(history.length === 0) - }) + const { result: history } = await db.users.findHistory(id); + assert(history.length === 0); + }); }, - ) + ); await t.step("Should find filtered history", async () => { await useKv(async (kv) => { @@ -187,39 +187,39 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.delete(id) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.delete(id); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.update(id, mockUser3); const { result: history1 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "delete", - }) + }); const { result: history2 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "write" && entry.value.age === mockUser1.age, - }) + }); - assert(history1.length === 1) - assert(history2.length === 2) + assert(history1.length === 1); + assert(history2.length === 2); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser1.username ), - ) + ); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser2.username ), - ) - }) - }) + ); + }); + }); await t.step("Should delete all document history", async () => { await useKv(async (kv) => { @@ -231,27 +231,27 @@ Deno.test("indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.set(id, mockUser3, { overwrite: true }) - const cr = await db.users.add(generateUsers(1)[0]) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.set(id, 
mockUser3, { overwrite: true }); + const cr = await db.users.add(generateUsers(1)[0]); - assert(cr.ok) + assert(cr.ok); - const { result: history1_1 } = await db.users.findHistory(id) - const { result: history1_2 } = await db.users.findHistory(cr.id) - assert(history1_1.length === 3) - assert(history1_2.length === 1) + const { result: history1_1 } = await db.users.findHistory(id); + const { result: history1_2 } = await db.users.findHistory(cr.id); + assert(history1_1.length === 3); + assert(history1_2.length === 1); - await db.users.deleteHistory(id) + await db.users.deleteHistory(id); - const { result: history2_1 } = await db.users.findHistory(id) - const { result: history2_2 } = await db.users.findHistory(cr.id) - assert(history2_1.length === 0) - assert(history2_2.length === 1) - }) - }) -}) + const { result: history2_1 } = await db.users.findHistory(id); + const { result: history2_2 } = await db.users.findHistory(cr.id); + assert(history2_1.length === 0); + assert(history2_2.length === 1); + }); + }); +}); diff --git a/tests/indexable_collection/listenQueue.test.ts b/tests/indexable_collection/listenQueue.test.ts index 4c3d7d5..17ed319 100644 --- a/tests/indexable_collection/listenQueue.test.ts +++ b/tests/indexable_collection/listenQueue.test.ts @@ -4,40 +4,40 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" +} from "../../mod.ts"; import { KVDEX_KEY_PREFIX, UNDELIVERED_KEY_PREFIX, -} from "../../src/constants.ts" -import { createHandlerId, extendKey } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { createResolver, sleep, useKv } from "../utils.ts" +} from "../../src/constants.ts"; +import { createHandlerId, extendKey } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { createResolver, sleep, useKv } from "../utils.ts"; Deno.test("indexable_collection - listenQueue", async (t) => { await t.step("Should receive message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "id" + const data = "data"; + const undeliveredId = "id"; const db = kvdex(kv, { i_users: collection(model(), { indices: {} }), - }) + }); - const sleeper = createResolver() - const handlerId = createHandlerId(db.i_users._keys.base, undefined) - let assertion = false + const sleeper = createResolver(); + const handlerId = createHandlerId(db.i_users._keys.base, undefined); + let assertion = false; const listener = db.i_users.listenQueue((msgData) => { - assertion = msgData === data - sleeper.resolve() - }) + assertion = msgData === data; + sleeper.resolve(); + }); const msg: QueueMessage = { __is_undefined__: false, __handlerId__: handlerId, __data__: data, - } + }; await kv.enqueue(msg, { keysIfUndelivered: [ @@ -47,36 +47,36 @@ Deno.test("indexable_collection - listenQueue", async (t) => { undeliveredId, ), ], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.i_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.i_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should not receive db queue message", async () => { await useKv(async (kv) => { const db = kvdex(kv, { i_users: collection(model(), { indices: {} }), - }) + 
}); - let assertion = true + let assertion = true; const listener = db.i_users.listenQueue(() => { - assertion = false - }) + assertion = false; + }); - await db.enqueue("data") + await db.enqueue("data"); - await sleep(500) + await sleep(500); - assert(assertion) + assert(assertion); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/indexable_collection/map.test.ts b/tests/indexable_collection/map.test.ts index f4befdd..ac10a62 100644 --- a/tests/indexable_collection/map.test.ts +++ b/tests/indexable_collection/map.test.ts @@ -1,24 +1,24 @@ -import { assert } from "../test.deps.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - map", async (t) => { await t.step( "Should run callback mapper function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const { result } = await db.i_users.map((doc) => doc.value.username) + const { result } = await db.i_users.map((doc) => doc.value.username); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((username) => username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/mapBySecondaryIndex.test.ts b/tests/indexable_collection/mapBySecondaryIndex.test.ts index 57ca965..f89a874 100644 --- a/tests/indexable_collection/mapBySecondaryIndex.test.ts +++ b/tests/indexable_collection/mapBySecondaryIndex.test.ts @@ -1,26 +1,26 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - mapBySecondaryIndex", async (t) => { await t.step( "Should run callback mapper function for each document in the collection by secondary index", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.i_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); const { result } = await db.i_users.mapBySecondaryIndex( "age", mockUser1.age, (doc) => doc.value.username, - ) + ); - assert(result.length === 2) - assert(result.some((username) => username === mockUser1.username)) - assert(result.some((username) => username === mockUser2.username)) - assert(!result.some((username) => username === mockUser3.username)) - }) + assert(result.length === 2); + assert(result.some((username) => username === mockUser1.username)); + assert(result.some((username) => username === mockUser2.username)); + assert(!result.some((username) => username === mockUser3.username)); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/mapBySecondaryOrder.test.ts b/tests/indexable_collection/mapBySecondaryOrder.test.ts index f2227f6..973ac1a 100644 --- a/tests/indexable_collection/mapBySecondaryOrder.test.ts +++ b/tests/indexable_collection/mapBySecondaryOrder.test.ts @@ -1,29 +1,29 @@ -import { assert } from "../test.deps.ts" +import { assert } from "../test.deps.ts"; import { 
mockUser1, mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - mapBySecondaryOrder", async (t) => { await t.step( "Should run callback mapper function for each document in the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const { result } = await db.i_users.mapBySecondaryOrder( "age", (doc) => doc.value.username, - ) + ); - assert(result[0] === mockUser3.username) - assert(result[1] === mockUser1.username) - assert(result[2] === mockUser2.username) - }) + assert(result[0] === mockUser3.username); + assert(result[1] === mockUser1.username); + assert(result[2] === mockUser2.username); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/properties.test.ts b/tests/indexable_collection/properties.test.ts index f67cd72..b20ccc7 100644 --- a/tests/indexable_collection/properties.test.ts +++ b/tests/indexable_collection/properties.test.ts @@ -4,40 +4,40 @@ import { kvdex, type KvValue, model, -} from "../../mod.ts" +} from "../../mod.ts"; import { ID_KEY_PREFIX, KVDEX_KEY_PREFIX, PRIMARY_INDEX_KEY_PREFIX, SECONDARY_INDEX_KEY_PREFIX, -} from "../../src/constants.ts" -import { extendKey, keyEq } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import type { User } from "../models.ts" -import { generateUsers, sleep, useDb, useKv } from "../utils.ts" -import { mockUser2 } from "../mocks.ts" -import { mockUser3 } from "../mocks.ts" +} from "../../src/constants.ts"; +import { extendKey, keyEq } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { generateUsers, sleep, useDb, useKv } from "../utils.ts"; +import { mockUser2 } from "../mocks.ts"; +import { mockUser3 } from "../mocks.ts"; Deno.test("indexable_collection - properties", async (t) => { await t.step("Keys should have the correct prefixes", async () => { await useDb((db) => { - const baseKey = db.i_users._keys.base - const idKey = db.i_users._keys.id - const primaryIndexKey = db.i_users._keys.primaryIndex - const secondaryIndexKey = db.i_users._keys.secondaryIndex - const prefix = extendKey([KVDEX_KEY_PREFIX], "i_users") - - assert(keyEq(baseKey, prefix)) - assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))) + const baseKey = db.i_users._keys.base; + const idKey = db.i_users._keys.id; + const primaryIndexKey = db.i_users._keys.primaryIndex; + const secondaryIndexKey = db.i_users._keys.secondaryIndex; + const prefix = extendKey([KVDEX_KEY_PREFIX], "i_users"); + + assert(keyEq(baseKey, prefix)); + assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))); assert( keyEq(primaryIndexKey, extendKey(prefix, PRIMARY_INDEX_KEY_PREFIX)), - ) + ); assert( keyEq(secondaryIndexKey, extendKey(prefix, SECONDARY_INDEX_KEY_PREFIX)), - ) - }) - }) + ); + }); + }); await t.step("Should generate ids with custom id generator", async () => { await useKv((kv) => { @@ -50,196 +50,196 @@ Deno.test("indexable_collection - properties", async (t) => { idGenerator: (data) => data.username, indices: {}, }), - }) + }); - const id1 = db.users1._idGenerator(mockUser1) - const id2 = db.users2._idGenerator(mockUser1) + const id1 = 
db.users1._idGenerator(mockUser1); + const id2 = db.users2._idGenerator(mockUser1); - assert(typeof id1 === "number") - assert(id2 === mockUser1.username) - }) - }) + assert(typeof id1 === "number"); + assert(id2 === mockUser1.username); + }); + }); await t.step("Should select using cursor pagination", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - let cursor: string | undefined = undefined + const selected: Document[] = []; + let cursor: string | undefined = undefined; do { const query = await db.i_users.getMany({ cursor, limit: users.length / 10, - }) + }); - selected.push(...query.result) - cursor = query.cursor - } while (cursor) + selected.push(...query.result); + cursor = query.cursor; + } while (cursor); assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select using offset pagination", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - const limit = 50 + const selected: Document[] = []; + const limit = 50; for (let offset = 0; offset < users.length; offset += limit) { - const { result } = await db.i_users.getMany({ offset, limit }) - selected.push(...result) - assert(result.length === 50) + const { result } = await db.i_users.getMany({ offset, limit }); + selected.push(...result); + assert(result.length === 50); } assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select filtered", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const sliced = users.slice(5, 7) + const sliced = users.slice(5, 7); const { result } = await db.i_users.getMany({ filter: (doc) => sliced.map((user) => user.username).includes( doc.value.username, ), - }) + }); - assert(result.length === sliced.length) + assert(result.length === sliced.length); assert( result.every((doc) => sliced.some((user) => user.username === doc.value.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select in reverse", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const query1 = await db.i_users.getMany() - const query2 = await db.i_users.getMany({ reverse: true }) + const query1 = await db.i_users.getMany(); + const query2 = await db.i_users.getMany({ reverse: true }); assert( JSON.stringify(query1.result) === JSON.stringify(query2.result.reverse()), - ) - }) - }) + ); + }); + }); await t.step("Should select 
from start id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.i_users.getMany() + const query1 = await db.i_users.getMany(); const query2 = await db.i_users.getMany({ startId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(index).length) + assert(query2.result.length === query1.result.slice(index).length); assert( query2.result.every((doc1) => query1.result.slice(index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select until end id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.i_users.getMany() + const query1 = await db.i_users.getMany(); const query2 = await db.i_users.getMany({ endId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(0, index).length) + assert(query2.result.length === query1.result.slice(0, index).length); assert( query2.result.every((doc1) => query1.result.slice(0, index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id to end id", async () => { await useDb(async (db) => { - const users = generateUsers(10) - const cr = await db.i_users.addMany(users) - const count1 = await db.i_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateUsers(10); + const cr = await db.i_users.addMany(users); + const count1 = await db.i_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index1 = 5 - const index2 = 7 + const index1 = 5; + const index2 = 7; - const query1 = await db.i_users.getMany() + const query1 = await db.i_users.getMany(); const query2 = await db.i_users.getMany({ startId: query1.result.at(index1)?.id, endId: query1.result.at(index2)?.id, - }) + }); assert( query2.result.length === query1.result.slice(index1, index2).length, - ) + ); assert( query2.result.every((doc1) => query1.result.slice(index1, index2).some((doc2) => doc1.id === doc2.id ) ), - ) - }) - }) + ); + }); + }); await t.step("Should allow optional indices", async () => { await useKv(async (kv) => { const db = kvdex(kv, { i: collection( model<{ - oblPrimary: string - oblSecondary: number - optPrimary?: string - optSecondary?: number - check?: Date + oblPrimary: string; + oblSecondary: number; + optPrimary?: string; + optSecondary?: number; + check?: Date; }>(), { indices: { @@ -250,168 +250,168 @@ Deno.test("indexable_collection - properties", async (t) => { }, }, ), - }) + }); const cr1 = await db.i.add({ oblPrimary: "oblPrimary1", oblSecondary: 10, - }) + }); const cr2 = await db.i.add({ oblPrimary: "oblPrimary2", oblSecondary: 10, optPrimary: "optPrimary2", optSecondary: 20, - }) + }); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); 
const byOptPrimary2 = await db.i.findByPrimaryIndex( "optPrimary", "optPrimary2", - ) + ); const byOptSecondary2 = await db.i.findBySecondaryIndex( "optSecondary", 20, - ) + ); - assert(byOptPrimary2?.id === cr2.id) - assert(byOptSecondary2.result.length === 1) - assert(byOptSecondary2.result.some((i) => i.id === cr2.id)) + assert(byOptPrimary2?.id === cr2.id); + assert(byOptSecondary2.result.length === 1); + assert(byOptSecondary2.result.some((i) => i.id === cr2.id)); const cr3 = await db.i.add({ oblPrimary: "oblPrimary3", oblSecondary: 10, optPrimary: "optPrimary2", optSecondary: 20, - }) + }); - assert(!cr3.ok) + assert(!cr3.ok); const cr4 = await db.i.add({ oblPrimary: "oblPrimary4", oblSecondary: 10, optPrimary: "optPrimary4", optSecondary: 20, - }) + }); - assert(cr4.ok) + assert(cr4.ok); const byOptPrimary4 = await db.i.findByPrimaryIndex( "optPrimary", "optPrimary4", - ) + ); const byOptSecondary4 = await db.i.findBySecondaryIndex( "optSecondary", 20, - ) + ); - assert(byOptPrimary4?.id === cr4.id) - assert(byOptSecondary4.result.length === 2) - assert(byOptSecondary4.result.some((i) => i.id === cr2.id)) - assert(byOptSecondary4.result.some((i) => i.id === cr4.id)) - }) - }) + assert(byOptPrimary4?.id === cr4.id); + assert(byOptSecondary4.result.length === 2); + assert(byOptSecondary4.result.some((i) => i.id === cr2.id)); + assert(byOptSecondary4.result.some((i) => i.id === cr4.id)); + }); + }); await t.step("Should select limited by database reads", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) - await sleep(10) - const cr3 = await db.i_users.add(mockUser3) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); + await sleep(10); + const cr3 = await db.i_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.i_users.getMany({ limit: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.every((doc) => doc.value.username === mockUser2.username)) - }) - }) + assert(result.every((doc) => doc.value.username === mockUser2.username)); + }); + }); await t.step("Should select limited by result count", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) - await sleep(10) - const cr3 = await db.i_users.add(mockUser3) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); + await sleep(10); + const cr3 = await db.i_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.i_users.getMany({ take: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.length === 2) - assert(result.some((doc) => doc.value.username === mockUser2.username)) - assert(result.some((doc) => doc.value.username === mockUser3.username)) - }) - }) + assert(result.length === 2); + assert(result.some((doc) => doc.value.username === mockUser2.username)); + assert(result.some((doc) => doc.value.username === mockUser3.username)); + }); + }); await t.step("Should correctly infer type of document", async () => { await useDb(async (db) => { - const doc = await db.i_users.find("") + const doc = await db.i_users.find(""); if (doc) { - 
doc.value.age.valueOf() + doc.value.age.valueOf(); } - }) - }) + }); + }); await t.step( "Should correctly infer insert and output of asymmetric model", async () => { await useDb(async (db) => { - const cr = await db.ai_users.add(mockUser1) - assert(cr.ok) - - const doc = await db.ai_users.find(cr.id) - assert(doc !== null) - assert(typeof doc.value.addressStr === "string") - assert(typeof doc.value.decadeAge === "number") - assert(typeof doc.value.name === "string") - }) + const cr = await db.ai_users.add(mockUser1); + assert(cr.ok); + + const doc = await db.ai_users.find(cr.id); + assert(doc !== null); + assert(typeof doc.value.addressStr === "string"); + assert(typeof doc.value.decadeAge === "number"); + assert(typeof doc.value.name === "string"); + }); }, - ) + ); await t.step("Should enable indexing using all available types", async () => { type Data = { - p: KvValue - s: KvValue - } + p: KvValue; + s: KvValue; + }; await useKv(async (kv) => { - const val1 = undefined - const val2 = null - const val3 = 10 - const val4 = "string" - const val5 = 10n - const val6 = true - const val7 = new Int8Array([10, 20, 30]) - const val8 = new Int16Array([10, 20, 30]) - const val9 = new Int32Array([10, 20, 30]) - const val10 = new BigInt64Array([10n, 20n, 30n]) - const val11 = new Uint8Array([10, 20, 30]) - const val12 = new Uint16Array([10, 20, 30]) - const val13 = new Uint32Array([10, 20, 30]) - const val14 = new BigUint64Array([10n, 20n, 30n]) - const val15 = new Uint8ClampedArray([10, 20, 30]) - const val16 = new Float32Array([10.203423878293472837429384]) - const val17 = new Float64Array([10.203423878293472837429384]) - const val18 = new Uint8Array([10, 20, 30]).buffer - const val19 = new Date() - const val20 = new RegExp("[0-9]") - const val21 = new DataView(new Uint8Array([10, 20, 30]).buffer) - const val22 = new Error("error") + const val1 = undefined; + const val2 = null; + const val3 = 10; + const val4 = "string"; + const val5 = 10n; + const val6 = true; + const val7 = new Int8Array([10, 20, 30]); + const val8 = new Int16Array([10, 20, 30]); + const val9 = new Int32Array([10, 20, 30]); + const val10 = new BigInt64Array([10n, 20n, 30n]); + const val11 = new Uint8Array([10, 20, 30]); + const val12 = new Uint16Array([10, 20, 30]); + const val13 = new Uint32Array([10, 20, 30]); + const val14 = new BigUint64Array([10n, 20n, 30n]); + const val15 = new Uint8ClampedArray([10, 20, 30]); + const val16 = new Float32Array([10.203423878293472837429384]); + const val17 = new Float64Array([10.203423878293472837429384]); + const val18 = new Uint8Array([10, 20, 30]).buffer; + const val19 = new Date(); + const val20 = new RegExp("[0-9]"); + const val21 = new DataView(new Uint8Array([10, 20, 30]).buffer); + const val22 = new Error("error"); const val23 = [ val1, val2, @@ -435,7 +435,7 @@ Deno.test("indexable_collection - properties", async (t) => { val20, val21, val22, - ] + ]; const val24 = { val1, val2, @@ -460,8 +460,8 @@ Deno.test("indexable_collection - properties", async (t) => { val21, val22, val23, - } - const val25 = new Set(val23) + }; + const val25 = new Set(val23); const val26 = new Map([ ["val1", val1], ["val2", val2], @@ -485,7 +485,7 @@ Deno.test("indexable_collection - properties", async (t) => { ["val20", val20], ["val21", val21], ["val22", val22], - ]) + ]); const db = kvdex(kv, { val1: collection(model(), { @@ -644,115 +644,115 @@ Deno.test("indexable_collection - properties", async (t) => { s: "secondary", }, }), - }) - - const cr1 = await db.val1.add({ p: val1, s: val1 }) - const cr2 = 
await db.val2.add({ p: val2, s: val2 }) - const cr3 = await db.val3.add({ p: val3, s: val3 }) - const cr4 = await db.val4.add({ p: val4, s: val4 }) - const cr5 = await db.val5.add({ p: val5, s: val5 }) - const cr6 = await db.val6.add({ p: val6, s: val6 }) - const cr7 = await db.val7.add({ p: val7, s: val7 }) - const cr8 = await db.val8.add({ p: val8, s: val8 }) - const cr9 = await db.val9.add({ p: val9, s: val9 }) - const cr10 = await db.val10.add({ p: val10, s: val10 }) - const cr11 = await db.val11.add({ p: val11, s: val11 }) - const cr12 = await db.val12.add({ p: val12, s: val12 }) - const cr13 = await db.val13.add({ p: val13, s: val13 }) - const cr14 = await db.val14.add({ p: val14, s: val14 }) - const cr15 = await db.val15.add({ p: val15, s: val15 }) - const cr16 = await db.val16.add({ p: val16, s: val16 }) - const cr17 = await db.val17.add({ p: val17, s: val17 }) - const cr18 = await db.val18.add({ p: val18, s: val18 }) - const cr19 = await db.val19.add({ p: val19, s: val19 }) - const cr20 = await db.val20.add({ p: val20, s: val20 }) - const cr21 = await db.val21.add({ p: val21, s: val21 }) - const cr22 = await db.val22.add({ p: val22, s: val22 }) - const cr23 = await db.val23.add({ p: val23, s: val23 }) - const cr24 = await db.val24.add({ p: val24, s: val24 }) - const cr25 = await db.val25.add({ p: val25, s: val25 }) - const cr26 = await db.val26.add({ p: val26, s: val26 }) - - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) - assert(cr4.ok) - assert(cr5.ok) - assert(cr6.ok) - assert(cr7.ok) - assert(cr8.ok) - assert(cr9.ok) - assert(cr10.ok) - assert(cr11.ok) - assert(cr12.ok) - assert(cr13.ok) - assert(cr14.ok) - assert(cr15.ok) - assert(cr16.ok) - assert(cr17.ok) - assert(cr18.ok) - assert(cr19.ok) - assert(cr20.ok) - assert(cr21.ok) - assert(cr22.ok) - assert(cr23.ok) - assert(cr24.ok) - assert(cr25.ok) - assert(cr26.ok) + }); + + const cr1 = await db.val1.add({ p: val1, s: val1 }); + const cr2 = await db.val2.add({ p: val2, s: val2 }); + const cr3 = await db.val3.add({ p: val3, s: val3 }); + const cr4 = await db.val4.add({ p: val4, s: val4 }); + const cr5 = await db.val5.add({ p: val5, s: val5 }); + const cr6 = await db.val6.add({ p: val6, s: val6 }); + const cr7 = await db.val7.add({ p: val7, s: val7 }); + const cr8 = await db.val8.add({ p: val8, s: val8 }); + const cr9 = await db.val9.add({ p: val9, s: val9 }); + const cr10 = await db.val10.add({ p: val10, s: val10 }); + const cr11 = await db.val11.add({ p: val11, s: val11 }); + const cr12 = await db.val12.add({ p: val12, s: val12 }); + const cr13 = await db.val13.add({ p: val13, s: val13 }); + const cr14 = await db.val14.add({ p: val14, s: val14 }); + const cr15 = await db.val15.add({ p: val15, s: val15 }); + const cr16 = await db.val16.add({ p: val16, s: val16 }); + const cr17 = await db.val17.add({ p: val17, s: val17 }); + const cr18 = await db.val18.add({ p: val18, s: val18 }); + const cr19 = await db.val19.add({ p: val19, s: val19 }); + const cr20 = await db.val20.add({ p: val20, s: val20 }); + const cr21 = await db.val21.add({ p: val21, s: val21 }); + const cr22 = await db.val22.add({ p: val22, s: val22 }); + const cr23 = await db.val23.add({ p: val23, s: val23 }); + const cr24 = await db.val24.add({ p: val24, s: val24 }); + const cr25 = await db.val25.add({ p: val25, s: val25 }); + const cr26 = await db.val26.add({ p: val26, s: val26 }); + + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); + assert(cr4.ok); + assert(cr5.ok); + assert(cr6.ok); + assert(cr7.ok); + assert(cr8.ok); + assert(cr9.ok); + assert(cr10.ok); 
+ assert(cr11.ok); + assert(cr12.ok); + assert(cr13.ok); + assert(cr14.ok); + assert(cr15.ok); + assert(cr16.ok); + assert(cr17.ok); + assert(cr18.ok); + assert(cr19.ok); + assert(cr20.ok); + assert(cr21.ok); + assert(cr22.ok); + assert(cr23.ok); + assert(cr24.ok); + assert(cr25.ok); + assert(cr26.ok); //const byPrimary1 = await db.val1.findByPrimaryIndex("p", val1) - const byPrimary2 = await db.val2.findByPrimaryIndex("p", val2) - const byPrimary3 = await db.val3.findByPrimaryIndex("p", val3) - const byPrimary4 = await db.val4.findByPrimaryIndex("p", val4) - const byPrimary5 = await db.val5.findByPrimaryIndex("p", val5) - const byPrimary6 = await db.val6.findByPrimaryIndex("p", val6) - const byPrimary7 = await db.val7.findByPrimaryIndex("p", val7) - const byPrimary8 = await db.val8.findByPrimaryIndex("p", val8) - const byPrimary9 = await db.val9.findByPrimaryIndex("p", val9) - const byPrimary10 = await db.val10.findByPrimaryIndex("p", val10) - const byPrimary11 = await db.val11.findByPrimaryIndex("p", val11) - const byPrimary12 = await db.val12.findByPrimaryIndex("p", val12) - const byPrimary13 = await db.val13.findByPrimaryIndex("p", val13) - const byPrimary14 = await db.val14.findByPrimaryIndex("p", val14) - const byPrimary15 = await db.val15.findByPrimaryIndex("p", val15) - const byPrimary16 = await db.val16.findByPrimaryIndex("p", val16) - const byPrimary17 = await db.val17.findByPrimaryIndex("p", val17) - const byPrimary18 = await db.val18.findByPrimaryIndex("p", val18) - const byPrimary19 = await db.val19.findByPrimaryIndex("p", val19) - const byPrimary20 = await db.val20.findByPrimaryIndex("p", val20) - const byPrimary21 = await db.val21.findByPrimaryIndex("p", val21) - const byPrimary22 = await db.val22.findByPrimaryIndex("p", val22) - const byPrimary23 = await db.val23.findByPrimaryIndex("p", val23) - const byPrimary24 = await db.val24.findByPrimaryIndex("p", val24) - const byPrimary25 = await db.val25.findByPrimaryIndex("p", val25) - const byPrimary26 = await db.val26.findByPrimaryIndex("p", val26) + const byPrimary2 = await db.val2.findByPrimaryIndex("p", val2); + const byPrimary3 = await db.val3.findByPrimaryIndex("p", val3); + const byPrimary4 = await db.val4.findByPrimaryIndex("p", val4); + const byPrimary5 = await db.val5.findByPrimaryIndex("p", val5); + const byPrimary6 = await db.val6.findByPrimaryIndex("p", val6); + const byPrimary7 = await db.val7.findByPrimaryIndex("p", val7); + const byPrimary8 = await db.val8.findByPrimaryIndex("p", val8); + const byPrimary9 = await db.val9.findByPrimaryIndex("p", val9); + const byPrimary10 = await db.val10.findByPrimaryIndex("p", val10); + const byPrimary11 = await db.val11.findByPrimaryIndex("p", val11); + const byPrimary12 = await db.val12.findByPrimaryIndex("p", val12); + const byPrimary13 = await db.val13.findByPrimaryIndex("p", val13); + const byPrimary14 = await db.val14.findByPrimaryIndex("p", val14); + const byPrimary15 = await db.val15.findByPrimaryIndex("p", val15); + const byPrimary16 = await db.val16.findByPrimaryIndex("p", val16); + const byPrimary17 = await db.val17.findByPrimaryIndex("p", val17); + const byPrimary18 = await db.val18.findByPrimaryIndex("p", val18); + const byPrimary19 = await db.val19.findByPrimaryIndex("p", val19); + const byPrimary20 = await db.val20.findByPrimaryIndex("p", val20); + const byPrimary21 = await db.val21.findByPrimaryIndex("p", val21); + const byPrimary22 = await db.val22.findByPrimaryIndex("p", val22); + const byPrimary23 = await db.val23.findByPrimaryIndex("p", val23); + const 
byPrimary24 = await db.val24.findByPrimaryIndex("p", val24); + const byPrimary25 = await db.val25.findByPrimaryIndex("p", val25); + const byPrimary26 = await db.val26.findByPrimaryIndex("p", val26); //assert(byPrimary1 !== null) - assert(byPrimary2 !== null) - assert(byPrimary3 !== null) - assert(byPrimary4 !== null) - assert(byPrimary5 !== null) - assert(byPrimary6 !== null) - assert(byPrimary7 !== null) - assert(byPrimary8 !== null) - assert(byPrimary9 !== null) - assert(byPrimary10 !== null) - assert(byPrimary11 !== null) - assert(byPrimary12 !== null) - assert(byPrimary13 !== null) - assert(byPrimary14 !== null) - assert(byPrimary15 !== null) - assert(byPrimary16 !== null) - assert(byPrimary17 !== null) - assert(byPrimary18 !== null) - assert(byPrimary19 !== null) - assert(byPrimary20 !== null) - assert(byPrimary21 !== null) - assert(byPrimary22 !== null) - assert(byPrimary23 !== null) - assert(byPrimary24 !== null) - assert(byPrimary25 !== null) - assert(byPrimary26 !== null) + assert(byPrimary2 !== null); + assert(byPrimary3 !== null); + assert(byPrimary4 !== null); + assert(byPrimary5 !== null); + assert(byPrimary6 !== null); + assert(byPrimary7 !== null); + assert(byPrimary8 !== null); + assert(byPrimary9 !== null); + assert(byPrimary10 !== null); + assert(byPrimary11 !== null); + assert(byPrimary12 !== null); + assert(byPrimary13 !== null); + assert(byPrimary14 !== null); + assert(byPrimary15 !== null); + assert(byPrimary16 !== null); + assert(byPrimary17 !== null); + assert(byPrimary18 !== null); + assert(byPrimary19 !== null); + assert(byPrimary20 !== null); + assert(byPrimary21 !== null); + assert(byPrimary22 !== null); + assert(byPrimary23 !== null); + assert(byPrimary24 !== null); + assert(byPrimary25 !== null); + assert(byPrimary26 !== null); /*const { result: bySecondary1 } = await db.val1.findBySecondaryIndex( "s", @@ -762,132 +762,132 @@ Deno.test("indexable_collection - properties", async (t) => { const { result: bySecondary2 } = await db.val2.findBySecondaryIndex( "s", val2, - ) + ); const { result: bySecondary3 } = await db.val3.findBySecondaryIndex( "s", val3, - ) + ); const { result: bySecondary4 } = await db.val4.findBySecondaryIndex( "s", val4, - ) + ); const { result: bySecondary5 } = await db.val5.findBySecondaryIndex( "s", val5, - ) + ); const { result: bySecondary6 } = await db.val6.findBySecondaryIndex( "s", val6, - ) + ); const { result: bySecondary7 } = await db.val7.findBySecondaryIndex( "s", val7, - ) + ); const { result: bySecondary8 } = await db.val8.findBySecondaryIndex( "s", val8, - ) + ); const { result: bySecondary9 } = await db.val9.findBySecondaryIndex( "s", val9, - ) + ); const { result: bySecondary10 } = await db.val10.findBySecondaryIndex( "s", val10, - ) + ); const { result: bySecondary11 } = await db.val11.findBySecondaryIndex( "s", val11, - ) + ); const { result: bySecondary12 } = await db.val12.findBySecondaryIndex( "s", val12, - ) + ); const { result: bySecondary13 } = await db.val13.findBySecondaryIndex( "s", val13, - ) + ); const { result: bySecondary14 } = await db.val14.findBySecondaryIndex( "s", val14, - ) + ); const { result: bySecondary15 } = await db.val15.findBySecondaryIndex( "s", val15, - ) + ); const { result: bySecondary16 } = await db.val16.findBySecondaryIndex( "s", val16, - ) + ); const { result: bySecondary17 } = await db.val17.findBySecondaryIndex( "s", val17, - ) + ); const { result: bySecondary18 } = await db.val18.findBySecondaryIndex( "s", val18, - ) + ); const { result: bySecondary19 } = await 
db.val19.findBySecondaryIndex( "s", val19, - ) + ); const { result: bySecondary20 } = await db.val20.findBySecondaryIndex( "s", val20, - ) + ); const { result: bySecondary21 } = await db.val21.findBySecondaryIndex( "s", val21, - ) + ); const { result: bySecondary22 } = await db.val22.findBySecondaryIndex( "s", val22, - ) + ); const { result: bySecondary23 } = await db.val23.findBySecondaryIndex( "s", val23, - ) + ); const { result: bySecondary24 } = await db.val24.findBySecondaryIndex( "s", val24, - ) + ); const { result: bySecondary25 } = await db.val25.findBySecondaryIndex( "s", val25, - ) + ); const { result: bySecondary26 } = await db.val26.findBySecondaryIndex( "s", val26, - ) + ); //assert(bySecondary1.length === 1) - assert(bySecondary2.length === 1) - assert(bySecondary3.length === 1) - assert(bySecondary4.length === 1) - assert(bySecondary5.length === 1) - assert(bySecondary6.length === 1) - assert(bySecondary7.length === 1) - assert(bySecondary8.length === 1) - assert(bySecondary9.length === 1) - assert(bySecondary10.length === 1) - assert(bySecondary11.length === 1) - assert(bySecondary12.length === 1) - assert(bySecondary13.length === 1) - assert(bySecondary14.length === 1) - assert(bySecondary15.length === 1) - assert(bySecondary16.length === 1) - assert(bySecondary17.length === 1) - assert(bySecondary18.length === 1) - assert(bySecondary19.length === 1) - assert(bySecondary20.length === 1) - assert(bySecondary21.length === 1) - assert(bySecondary22.length === 1) - assert(bySecondary23.length === 1) - assert(bySecondary24.length === 1) - assert(bySecondary25.length === 1) - assert(bySecondary26.length === 1) - }) - }) + assert(bySecondary2.length === 1); + assert(bySecondary3.length === 1); + assert(bySecondary4.length === 1); + assert(bySecondary5.length === 1); + assert(bySecondary6.length === 1); + assert(bySecondary7.length === 1); + assert(bySecondary8.length === 1); + assert(bySecondary9.length === 1); + assert(bySecondary10.length === 1); + assert(bySecondary11.length === 1); + assert(bySecondary12.length === 1); + assert(bySecondary13.length === 1); + assert(bySecondary14.length === 1); + assert(bySecondary15.length === 1); + assert(bySecondary16.length === 1); + assert(bySecondary17.length === 1); + assert(bySecondary18.length === 1); + assert(bySecondary19.length === 1); + assert(bySecondary20.length === 1); + assert(bySecondary21.length === 1); + assert(bySecondary22.length === 1); + assert(bySecondary23.length === 1); + assert(bySecondary24.length === 1); + assert(bySecondary25.length === 1); + assert(bySecondary26.length === 1); + }); + }); await t.step("Should successfully generate id asynchronously", async () => { await useKv(async (kv) => { @@ -901,23 +901,23 @@ Deno.test("indexable_collection - properties", async (t) => { const buffer = await crypto.subtle.digest( "SHA-256", new ArrayBuffer(user.age), - ) - return Math.random() * buffer.byteLength + ); + return Math.random() * buffer.byteLength; }, }), - }) + }); - const cr1 = await db.test.add(mockUser1) - const cr2 = await db.atomic((s) => s.test).add(mockUser2).commit() + const cr1 = await db.test.add(mockUser1); + const cr2 = await db.atomic((s) => s.test).add(mockUser2).commit(); const doc2 = await db.test.getOne({ filter: (doc) => doc.value.username === mockUser2.username, - }) - - assert(cr1.ok) - assert(typeof cr1.id === "number") - assert(cr2.ok) - assert(doc2 !== null) - assert(typeof doc2.id === "number") - }) - }) -}) + }); + + assert(cr1.ok); + assert(typeof cr1.id === "number"); + assert(cr2.ok); 
+ assert(doc2 !== null); + assert(typeof doc2.id === "number"); + }); + }); +}); diff --git a/tests/indexable_collection/set.test.ts b/tests/indexable_collection/set.test.ts index 559f72c..bfe4dd8 100644 --- a/tests/indexable_collection/set.test.ts +++ b/tests/indexable_collection/set.test.ts @@ -1,118 +1,120 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - set", async (t) => { await t.step("Should set new document entry in collection", async () => { await useDb(async (db) => { - const cr = await db.i_users.set("id", mockUser1) - assert(cr.ok) + const cr = await db.i_users.set("id", mockUser1); + assert(cr.ok); - const doc = await db.i_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.i_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step( "Should not set new document entry in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.set("id", mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set("id", mockUser1); + assert(cr1.ok); - const cr2 = await db.i_users.set("id", mockUser2) - assert(!cr2.ok) + const cr2 = await db.i_users.set("id", mockUser2); + assert(!cr2.ok); - const doc = await db.i_users.find("id") - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + const doc = await db.i_users.find("id"); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should not set new document entry in collection with primary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.set("id1", mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set("id1", mockUser1); + assert(cr1.ok); - const cr2 = await db.i_users.set("id2", mockUser1) - assert(!cr2.ok) + const cr2 = await db.i_users.set("id2", mockUser1); + assert(!cr2.ok); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(byPrimary?.id === cr1.id) - assert(bySecondary.result.length === 1) - }) + assert(byPrimary?.id === cr1.id); + assert(bySecondary.result.length === 1); + }); }, - ) + ); await t.step( "Should overwrite document in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.set("id", mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set("id", mockUser1); + assert(cr1.ok); - const cr2 = await db.i_users.set("id", mockUser2, { overwrite: true }) - assert(cr2.ok) + const cr2 = await db.i_users.set("id", mockUser2, { overwrite: true }); + assert(cr2.ok); - const doc = await db.i_users.find("id") - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) + const doc = await db.i_users.find("id"); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); }, - ) + ); await t.step( "Should not overwrite document in collection with colliding primary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.set("id1", mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set("id1", 
mockUser1); + assert(cr1.ok); - const cr2 = await db.i_users.set("id2", mockUser1, { overwrite: true }) - assert(!cr2.ok) + const cr2 = await db.i_users.set("id2", mockUser1, { overwrite: true }); + assert(!cr2.ok); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(byPrimary?.id === cr1.id) - assert(bySecondary.result.length === 1) - }) + assert(byPrimary?.id === cr1.id); + assert(bySecondary.result.length === 1); + }); }, - ) + ); await t.step("Should successfully parse and set document", async () => { await useDb(async (db) => { - let assertion = true - await db.zi_users.set("id", mockUser1).catch(() => assertion = false) - assert(assertion) - }) - }) + let assertion = true; + await db.zi_users.set("id", mockUser1).catch(() => assertion = false); + assert(assertion); + }); + }); await t.step("Should fail to parse and set document", async () => { await useDb(async (db) => { - let assertion = false - await db.zi_users.set("id", mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) - }) -}) + let assertion = false; + await db.zi_users.set("id", mockUserInvalid).catch(() => + assertion = true + ); + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/types.test.ts b/tests/indexable_collection/types.test.ts index 5b9f8fc..fdcd998 100644 --- a/tests/indexable_collection/types.test.ts +++ b/tests/indexable_collection/types.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { useKv } from "../utils.ts" -import { TObject } from "../values.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { useKv } from "../utils.ts"; +import { TObject } from "../values.ts"; Deno.test("indexable_collection - types", async (t) => { await t.step( @@ -15,14 +15,14 @@ Deno.test("indexable_collection - types", async (t) => { TNumber: "secondary", }, }), - }) + }); - const cr = await db.objects.add(TObject) - assert(cr.ok) + const cr = await db.objects.add(TObject); + assert(cr.ok); - const doc = await db.objects.find(cr.id) - assertEquals(doc?.value, TObject) - }) + const doc = await db.objects.find(cr.id); + assertEquals(doc?.value, TObject); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/update.test.ts b/tests/indexable_collection/update.test.ts index b0c42ca..60a7d87 100644 --- a/tests/indexable_collection/update.test.ts +++ b/tests/indexable_collection/update.test.ts @@ -1,17 +1,17 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" -import { mockUser3 } from "../mocks.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; +import { mockUser3 } from "../mocks.ts"; Deno.test("indexable_collection - update", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const 
updateData = { address: { @@ -19,55 +19,57 @@ Deno.test("indexable_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.update(cr.id, updateData, { strategy: "merge-shallow", - }) + }); - const byId = await db.i_users.find(cr.id) + const byId = await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -75,154 +77,158 @@ Deno.test("indexable_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.update(cr.id, updateData, { strategy: "merge", - }) + }); - const byId = await db.i_users.find(cr.id) + const byId = await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const updateCr = await db.i_users.update(cr.id, mockUser2, { strategy: "replace", - }) + }); - const byId = await db.i_users.find(cr.id) + const byId = await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser2.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - assert(doc.value.age === mockUser2.age) - assert(doc.value.address.country === mockUser2.address.country) - assert(doc.value.address.city === mockUser2.address.city) - assert(doc.value.address.houseNr === mockUser2.address.houseNr) - assert(doc.value.address.street === mockUser2.address.street) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + assert(doc.value.age === mockUser2.age); + assert(doc.value.address.country === mockUser2.address.country); + assert(doc.value.address.city === mockUser2.address.city); + assert(doc.value.address.houseNr === mockUser2.address.houseNr); + assert(doc.value.address.street === mockUser2.address.street); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should not update document or delete indexed entries upon index collision", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" + const id1 = "id1"; + const id2 = "id2"; - const cr1 = await db.i_users.set(id1, mockUser1) - const cr2 = await db.i_users.set(id2, mockUser2) + const cr1 = await db.i_users.set(id1, mockUser1); + const cr2 = await db.i_users.set(id2, mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const update = await db.i_users.update(id2, { ...mockUser3, username: mockUser2.username, - }) + }); - assert(!update.ok) + assert(!update.ok); - const doc = await db.i_users.find(id2) + const doc = await db.i_users.find(id2); const docByPrimaryIndex = await db.i_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); - assert(doc?.value.username === mockUser2.username) - assert(docByPrimaryIndex?.value.username === mockUser2.username) - }) + assert(doc?.value.username === mockUser2.username); + assert(docByPrimaryIndex?.value.username === mockUser2.username); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zi_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zi_users.add(mockUser1); + assert(cr.ok); - await db.zi_users.update(cr.id, mockUser2).catch(() => assertion = false) + await db.zi_users.update(cr.id, mockUser2).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zi_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zi_users.add(mockUser1); + assert(cr.ok); await db.zi_users.update(cr.id, mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateByPrimaryIndex.test.ts b/tests/indexable_collection/updateByPrimaryIndex.test.ts index 6481a06..ffb8ac6 100644 --- a/tests/indexable_collection/updateByPrimaryIndex.test.ts +++ b/tests/indexable_collection/updateByPrimaryIndex.test.ts @@ -1,16 +1,16 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -18,7 +18,7 @@ Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateByPrimaryIndex( "username", @@ -27,51 +27,53 @@ Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { { strategy: "merge-shallow", }, - ) + ); - const byId = await db.i_users.find(cr.id) + const byId = 
await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -79,7 +81,7 @@ Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateByPrimaryIndex( "username", @@ -88,51 +90,53 @@ Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { { strategy: "merge", }, - ) + ); - const byId = await db.i_users.find(cr.id) + const byId = await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.i_users.add(mockUser1) - assert(cr.ok) + const cr = await db.i_users.add(mockUser1); + assert(cr.ok); const updateCr = await db.i_users.updateByPrimaryIndex( "username", @@ -141,76 +145,78 @@ Deno.test("indexable_collection - updateByPrimaryIndex", async (t) => { { strategy: "replace", }, - ) + ); - const byId = await db.i_users.find(cr.id) + const byId = await db.i_users.find(cr.id); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser2.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - assert(doc.value.age === mockUser2.age) - assert(doc.value.address.country === mockUser2.address.country) - assert(doc.value.address.city === mockUser2.address.city) - assert(doc.value.address.houseNr === mockUser2.address.houseNr) - assert(doc.value.address.street === mockUser2.address.street) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + assert(doc.value.age === mockUser2.age); + assert(doc.value.address.country === mockUser2.address.country); + assert(doc.value.address.city === mockUser2.address.city); + assert(doc.value.address.houseNr === mockUser2.address.houseNr); + assert(doc.value.address.street === mockUser2.address.street); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zi_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zi_users.add(mockUser1); + assert(cr.ok); await db.zi_users.updateByPrimaryIndex( "username", mockUser1.username, mockUser2, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zi_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zi_users.add(mockUser1); + assert(cr.ok); await db.zi_users.updateByPrimaryIndex( "username", mockUser1.username, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateBySecondaryIndex.test.ts b/tests/indexable_collection/updateBySecondaryIndex.test.ts index 6697505..0775b85 100644 --- a/tests/indexable_collection/updateBySecondaryIndex.test.ts +++ b/tests/indexable_collection/updateBySecondaryIndex.test.ts @@ -1,19 +1,19 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -21,7 +21,7 @@ Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateBySecondaryIndex( "age", @@ -30,36 +30,36 @@ Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { { strategy: "merge-shallow", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEachBySecondaryIndex("age", users[0].age, (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using deep merge", async () => { await 
useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -67,7 +67,7 @@ Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateBySecondaryIndex( "age", @@ -76,36 +76,36 @@ Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { { strategy: "merge", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEachBySecondaryIndex("age", users[0].age, (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const { result } = await db.i_users.updateBySecondaryIndex( "age", @@ -114,81 +114,81 @@ Deno.test("indexable_collection - updateBySecondaryIndex", async (t) => { { strategy: "replace", }, - ) + ); assert( result.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( result.some((cr) => !cr.ok), - ) + ); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const { result: bySecondaryDocs } = await db.i_users .findBySecondaryIndex( "age", mockUser1.age, - ) + ); const bySecondary = bySecondaryDocs.find((doc) => doc.value.username === mockUser1.username - ) ?? 
null - - assert(byPrimary !== null) - assert(byPrimary.value.username === mockUser1.username) - assert(byPrimary.value.address.country === mockUser1.address.country) - assert(byPrimary.value.address.city === mockUser1.address.city) - assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr) - assert(byPrimary.value.address.street === mockUser1.address.street) - - assert(bySecondary !== null) - assert(bySecondary.value.username === mockUser1.username) - assert(bySecondary.value.address.country === mockUser1.address.country) - assert(bySecondary.value.address.city === mockUser1.address.city) - assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr) - assert(bySecondary.value.address.street === mockUser1.address.street) - }) + ) ?? null; + + assert(byPrimary !== null); + assert(byPrimary.value.username === mockUser1.username); + assert(byPrimary.value.address.country === mockUser1.address.country); + assert(byPrimary.value.address.city === mockUser1.address.city); + assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr); + assert(byPrimary.value.address.street === mockUser1.address.street); + + assert(bySecondary !== null); + assert(bySecondary.value.username === mockUser1.username); + assert(bySecondary.value.address.country === mockUser1.address.country); + assert(bySecondary.value.address.city === mockUser1.address.city); + assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr); + assert(bySecondary.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); await db.zi_users.updateBySecondaryIndex("age", users[0].age, mockUser1) - .catch(() => assertion = false) + .catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); await db.zi_users.updateBySecondaryIndex( "age", users[0].age, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateMany.test.ts b/tests/indexable_collection/updateMany.test.ts index aebb07a..28e61bb 100644 --- a/tests/indexable_collection/updateMany.test.ts +++ b/tests/indexable_collection/updateMany.test.ts @@ -1,19 +1,19 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("indexable_collection - updateMany", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + 
const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -21,40 +21,40 @@ Deno.test("indexable_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateMany(updateData, { strategy: "merge-shallow", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -62,110 +62,110 @@ Deno.test("indexable_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateMany(updateData, { strategy: "merge", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.i_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.i_users.addMany(users); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); 
const { result } = await db.i_users.updateMany(mockUser1, { strategy: "replace", - }) + }); assert( result.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( result.some((cr) => !cr.ok), - ) + ); const byPrimary = await db.i_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const { result: [bySecondary] } = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(byPrimary !== null) - assert(byPrimary.value.username === mockUser1.username) - assert(byPrimary.value.address.country === mockUser1.address.country) - assert(byPrimary.value.address.city === mockUser1.address.city) - assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr) - assert(byPrimary.value.address.street === mockUser1.address.street) - - assert(bySecondary !== null) - assert(bySecondary.value.username === mockUser1.username) - assert(bySecondary.value.address.country === mockUser1.address.country) - assert(bySecondary.value.address.city === mockUser1.address.city) - assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr) - assert(bySecondary.value.address.street === mockUser1.address.street) - }) + ); + + assert(byPrimary !== null); + assert(byPrimary.value.username === mockUser1.username); + assert(byPrimary.value.address.country === mockUser1.address.country); + assert(byPrimary.value.address.city === mockUser1.address.city); + assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr); + assert(byPrimary.value.address.street === mockUser1.address.street); + + assert(bySecondary !== null); + assert(bySecondary.value.username === mockUser1.username); + assert(bySecondary.value.address.country === mockUser1.address.country); + assert(bySecondary.value.address.city === mockUser1.address.city); + assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr); + assert(bySecondary.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); - await db.zi_users.updateMany(mockUser1).catch(() => assertion = false) + await db.zi_users.updateMany(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); await db.zi_users.updateMany(mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateManyBySecondaryOrder.test.ts b/tests/indexable_collection/updateManyBySecondaryOrder.test.ts index f4a3f32..4b0ddba 100644 --- a/tests/indexable_collection/updateManyBySecondaryOrder.test.ts +++ b/tests/indexable_collection/updateManyBySecondaryOrder.test.ts @@ -1,12 +1,12 @@ -import { assert, assertEquals } from "../test.deps.ts" +import { assert, assertEquals } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUserInvalid, mockUsersWithAlteredAge, -} from 
"../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" -import type { User } from "../models.ts" +} from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; +import type { User } from "../models.ts"; Deno.test.ignore( "indexable_collection - updateManyBySecondaryOrder", @@ -15,12 +15,12 @@ Deno.test.ignore( "Should update documents of KvObject type using shallow merge by secondary order", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -28,7 +28,7 @@ Deno.test.ignore( city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateManyBySecondaryOrder( "age", @@ -37,44 +37,44 @@ Deno.test.ignore( limit: 2, strategy: "merge-shallow", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEachBySecondaryOrder("age", (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); }, { limit: 2, - }) + }); const last = await db.i_users.getOneBySecondaryOrder("age", { reverse: true, - }) + }); - assert(last?.value.username === mockUser2.username) - assert(last.value.address.country === mockUser2.address.country) - }) + assert(last?.value.username === mockUser2.username); + assert(last.value.address.country === mockUser2.address.country); + }); }, - ) + ); await t.step( "Should update documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -82,7 +82,7 @@ Deno.test.ignore( city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.i_users.updateManyBySecondaryOrder( "age", @@ -91,42 +91,42 @@ Deno.test.ignore( limit: 2, strategy: "merge", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.i_users.forEachBySecondaryOrder("age", (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }, { limit: 2 
}) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }, { limit: 2 }); const last = await db.i_users.getOneBySecondaryOrder("age", { reverse: true, - }) + }); - assert(last?.value.username === mockUser2.username) - assert(last.value.address.country === mockUser2.address.country) - }) + assert(last?.value.username === mockUser2.username); + assert(last.value.address.country === mockUser2.address.country); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.i_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.i_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData: User = { username: "test", @@ -136,7 +136,7 @@ Deno.test.ignore( city: "Trondheim", houseNr: 10, }, - } + }; const { result: crs } = await db.i_users.updateManyBySecondaryOrder( "age", @@ -144,75 +144,75 @@ Deno.test.ignore( { strategy: "replace", }, - ) + ); assert( crs.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( crs.some((cr) => !cr.ok), - ) + ); const { result } = await db.i_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assertEquals(result[0].username, updateData.username) - assertEquals(result[0].address.country, updateData.address.country) - assertEquals(result[0].address.city, updateData.address.city) - assertEquals(result[0].address.houseNr, updateData.address.houseNr) - assertEquals(result[0].address.street, updateData.address.street) - - assertEquals(result[1].username, mockUser1.username) - assertEquals(result[1].address.country, mockUser1.address.country) - assertEquals(result[1].address.city, mockUser1.address.city) - assertEquals(result[1].address.houseNr, mockUser1.address.houseNr) - assertEquals(result[1].address.street, mockUser1.address.street) - - assertEquals(result[2].username, mockUser2.username) - assertEquals(result[2].address.country, mockUser2.address.country) - assertEquals(result[2].address.city, mockUser2.address.city) - assertEquals(result[2].address.houseNr, mockUser2.address.houseNr) - assertEquals(result[2].address.street, mockUser2.address.street) - }) + ); + + assertEquals(result[0].username, updateData.username); + assertEquals(result[0].address.country, updateData.address.country); + assertEquals(result[0].address.city, updateData.address.city); + assertEquals(result[0].address.houseNr, updateData.address.houseNr); + assertEquals(result[0].address.street, updateData.address.street); + + assertEquals(result[1].username, mockUser1.username); + assertEquals(result[1].address.country, mockUser1.address.country); + assertEquals(result[1].address.city, mockUser1.address.city); + assertEquals(result[1].address.houseNr, mockUser1.address.houseNr); + assertEquals(result[1].address.street, mockUser1.address.street); + + assertEquals(result[2].username, mockUser2.username); + assertEquals(result[2].address.country, mockUser2.address.country); + 
assertEquals(result[2].address.city, mockUser2.address.city); + assertEquals(result[2].address.houseNr, mockUser2.address.houseNr); + assertEquals(result[2].address.street, mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); await db.zi_users.updateManyBySecondaryOrder("age", mockUser1) - .catch(() => assertion = false) + .catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zi_users.addMany(users) - assert(cr.ok) + const cr = await db.zi_users.addMany(users); + assert(cr.ok); await db.zi_users.updateManyBySecondaryOrder( "age", mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) + assert(assertion); + }); + }); }, -) +); diff --git a/tests/indexable_collection/updateOne.test.ts b/tests/indexable_collection/updateOne.test.ts index 7b266c8..0a0896e 100644 --- a/tests/indexable_collection/updateOne.test.ts +++ b/tests/indexable_collection/updateOne.test.ts @@ -1,18 +1,18 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; Deno.test("indexable_collection - updateOne", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -20,43 +20,43 @@ Deno.test("indexable_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOne(updateData, { strategy: "merge-shallow", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - 
assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -64,106 +64,108 @@ Deno.test("indexable_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOne(updateData, { strategy: "merge", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.i_users.updateOne(updateData, { strategy: "replace", - }) - - assert(updateCr.ok) - - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === 
mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + }); + + assert(updateCr.ok); + + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr1 = await db.zi_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zi_users.add(mockUser2) + const cr1 = await db.zi_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zi_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - await db.zi_users.updateOne(mockUser1).catch(() => assertion = false) + await db.zi_users.updateOne(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr1 = await db.zi_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zi_users.add(mockUser2) + const cr1 = await db.zi_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zi_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - await db.zi_users.updateOne(mockUserInvalid).catch(() => assertion = true) + await db.zi_users.updateOne(mockUserInvalid).catch(() => + assertion = true + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateOneBySecondaryIndex.test.ts b/tests/indexable_collection/updateOneBySecondaryIndex.test.ts index 9b43d7e..11cb078 100644 --- a/tests/indexable_collection/updateOneBySecondaryIndex.test.ts +++ b/tests/indexable_collection/updateOneBySecondaryIndex.test.ts @@ -1,18 +1,18 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await 
db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -20,7 +20,7 @@ Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOneBySecondaryIndex( "age", @@ -29,39 +29,39 @@ Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { { strategy: "merge-shallow", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -69,7 +69,7 @@ Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOneBySecondaryIndex( "age", @@ -78,41 +78,41 @@ Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { { strategy: "merge", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - 
assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - await sleep(10) - const cr2 = await db.i_users.add(mockUser2) + const cr1 = await db.i_users.add(mockUser1); + await sleep(10); + const cr2 = await db.i_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.i_users.updateOneBySecondaryIndex( "age", @@ -121,72 +121,72 @@ Deno.test("indexable_collection - updateOneBySecondaryIndex", async (t) => { { strategy: "replace", }, - ) - - assert(updateCr.ok) - - const doc1 = await db.i_users.find(cr1.id) - const doc2 = await db.i_users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + ); + + assert(updateCr.ok); + + const doc1 = await db.i_users.find(cr1.id); + const doc2 = await db.i_users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr1 = await db.zi_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zi_users.add(mockUser2) + const cr1 = await db.zi_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zi_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); await db.zi_users.updateOneBySecondaryIndex( "age", mockUser2.age, mockUser1, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + 
}); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr1 = await db.zi_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zi_users.add(mockUser2) + const cr1 = await db.zi_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zi_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); await db.zi_users.updateOneBySecondaryIndex( "age", mockUser2.age, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/updateOneBySecondaryOrder.test.ts b/tests/indexable_collection/updateOneBySecondaryOrder.test.ts index 80450e0..9020442 100644 --- a/tests/indexable_collection/updateOneBySecondaryOrder.test.ts +++ b/tests/indexable_collection/updateOneBySecondaryOrder.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" +import { assert } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUser3, mockUserInvalid, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { useDb } from "../utils.ts" -import type { User } from "../models.ts" +} from "../mocks.ts"; +import { useDb } from "../utils.ts"; +import type { User } from "../models.ts"; Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData = { address: { @@ -23,7 +23,7 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOneBySecondaryOrder( "age", @@ -31,39 +31,39 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { { strategy: "merge-shallow", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.i_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[0].address.country === updateData.address.country) - assert(result[0].address.city === updateData.address.city) - assert(result[0].address.houseNr === updateData.address.houseNr) - assert(result[0].address.street === undefined) - - assert(result[1].address.country === mockUser1.address.country) - assert(result[1].address.city === mockUser1.address.city) - assert(result[1].address.houseNr === mockUser1.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[0].address.country === updateData.address.country); + assert(result[0].address.city === updateData.address.city); + assert(result[0].address.houseNr === updateData.address.houseNr); + assert(result[0].address.street === undefined); + + assert(result[1].address.country === mockUser1.address.country); + assert(result[1].address.city === mockUser1.address.city); + assert(result[1].address.houseNr === mockUser1.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + 
assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData = { address: { @@ -71,7 +71,7 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOneBySecondaryOrder( "age", @@ -80,39 +80,39 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { offset: 1, strategy: "merge", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.i_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[1].address.country === updateData.address.country) - assert(result[1].address.city === updateData.address.city) - assert(result[1].address.houseNr === updateData.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[0].address.country === mockUser3.address.country) - assert(result[0].address.city === mockUser3.address.city) - assert(result[0].address.houseNr === mockUser3.address.houseNr) - assert(result[0].address.street === mockUser3.address.street) - - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[1].address.country === updateData.address.country); + assert(result[1].address.city === updateData.address.city); + assert(result[1].address.houseNr === updateData.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + assert(result[0].address.country === mockUser3.address.country); + assert(result[0].address.city === mockUser3.address.city); + assert(result[0].address.houseNr === mockUser3.address.houseNr); + assert(result[0].address.street === mockUser3.address.street); + + assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.i_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.i_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData: User = { username: "test", @@ -122,7 +122,7 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { city: "Bern", houseNr: null, }, - } + }; const updateCr = await db.i_users.updateOneBySecondaryOrder( "age", @@ -130,43 +130,43 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { { strategy: "replace", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.i_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[0].username === updateData.username) - assert(result[0].age === 
updateData.age) - assert(result[0].address.country === updateData.address.country) - assert(result[0].address.city === updateData.address.city) - assert(result[0].address.houseNr === updateData.address.houseNr) - assert(result[0].address.street === undefined) - - assert(result[1].username === mockUser1.username) - assert(result[1].address.country === mockUser1.address.country) - assert(result[1].address.city === mockUser1.address.city) - assert(result[1].address.houseNr === mockUser1.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[2].username === mockUser2.username) - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[0].username === updateData.username); + assert(result[0].age === updateData.age); + assert(result[0].address.country === updateData.address.country); + assert(result[0].address.city === updateData.address.city); + assert(result[0].address.houseNr === updateData.address.houseNr); + assert(result[0].address.street === undefined); + + assert(result[1].username === mockUser1.username); + assert(result[1].address.country === mockUser1.address.country); + assert(result[1].address.city === mockUser1.address.city); + assert(result[1].address.houseNr === mockUser1.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + assert(result[2].username === mockUser2.username); + assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zi_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.zi_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData: User = { username: "test", @@ -176,30 +176,30 @@ Deno.test("indexable_collection - updateOneBySecondaryOrder", async (t) => { city: "Bern", houseNr: null, }, - } + }; await db.zi_users.updateOneBySecondaryOrder( "age", updateData, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zi_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.zi_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); await db.zi_users.updateOneBySecondaryOrder( "age", mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/indexable_collection/upsert.test.ts b/tests/indexable_collection/upsert.test.ts index 8ce4c61..070c4a6 100644 --- a/tests/indexable_collection/upsert.test.ts +++ b/tests/indexable_collection/upsert.test.ts @@ -1,35 +1,35 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from 
"../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - upsert", async (t) => { await t.step("Should set new doucment entry by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; const cr = await db.i_users.upsert({ id: id, set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); + }); await t.step( "Should update existing document entry by id using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -37,7 +37,7 @@ Deno.test("indexable_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.i_users.upsert({ id: id, @@ -45,30 +45,30 @@ Deno.test("indexable_collection - upsert", async (t) => { update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by id using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -76,7 +76,7 @@ Deno.test("indexable_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.i_users.upsert({ id: id, @@ -84,30 +84,30 @@ Deno.test("indexable_collection - upsert", async (t) => { update: updateData, }, { strategy: "merge", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === 
updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by id using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const cr2 = await db.i_users.upsert({ id: id, @@ -115,19 +115,19 @@ Deno.test("indexable_collection - upsert", async (t) => { update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/upsertByPrimaryIndex.test.ts b/tests/indexable_collection/upsertByPrimaryIndex.test.ts index a0dba20..ab957ea 100644 --- a/tests/indexable_collection/upsertByPrimaryIndex.test.ts +++ b/tests/indexable_collection/upsertByPrimaryIndex.test.ts @@ -1,7 +1,7 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { await t.step("Should set new doucment entry by primary index", async () => { @@ -10,24 +10,24 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { index: ["username", mockUser1.username], set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.i_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.i_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); + }); await t.step( "Should update existing document entry by primary index using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -35,7 +35,7 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.i_users.upsertByPrimaryIndex({ id: id, @@ -44,30 +44,30 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { 
update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by primary index using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -75,7 +75,7 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.i_users.upsertByPrimaryIndex({ id: id, @@ -84,30 +84,30 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { update: updateData, }, { strategy: "merge", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by primary index using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.i_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.i_users.set(id, mockUser1); + assert(cr1.ok); const cr2 = await db.i_users.upsertByPrimaryIndex({ id: id, @@ -116,19 +116,19 @@ Deno.test("indexable_collection - upsertByPrimaryIndex", async (t) => { update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.i_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.i_users.find(id); + 
assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/indexable_collection/watch.test.ts b/tests/indexable_collection/watch.test.ts index d134a39..1cacfde 100644 --- a/tests/indexable_collection/watch.test.ts +++ b/tests/indexable_collection/watch.test.ts @@ -1,74 +1,74 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { User } from "../models.ts" -import type { Document } from "../../mod.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { User } from "../models.ts"; +import type { Document } from "../../mod.ts"; Deno.test("indexable_collection - watch", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id = "id" - const docs: (Document | null)[] = [] + const id = "id"; + const docs: (Document | null)[] = []; const { promise, cancel } = db.i_users.watch(id, (doc) => { - docs.push(doc) - }) + docs.push(doc); + }); - const cr1 = await db.i_users.set(id, mockUser1) - await sleep(500) - const cr2 = await db.i_users.set(id, mockUser2, { overwrite: true }) - await sleep(500) - const cr3 = await db.i_users.update(id, mockUser3) - await sleep(500) - await db.i_users.delete(id) - await sleep(500) + const cr1 = await db.i_users.set(id, mockUser1); + await sleep(500); + const cr2 = await db.i_users.set(id, mockUser2, { overwrite: true }); + await sleep(500); + const cr3 = await db.i_users.update(id, mockUser3); + await sleep(500); + await db.i_users.delete(id); + await sleep(500); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - assert(docs.some((doc) => doc?.value.username === mockUser1.username)) - assert(docs.some((doc) => doc?.value.username === mockUser2.username)) - assert(docs.some((doc) => doc?.value.username === mockUser3.username)) - assert(docs.some((doc) => doc === null)) + assert(docs.some((doc) => doc?.value.username === mockUser1.username)); + assert(docs.some((doc) => doc?.value.username === mockUser2.username)); + assert(docs.some((doc) => doc?.value.username === mockUser3.username)); + assert(docs.some((doc) => doc === null)); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - let count = 0 - let username = "" - let lastDoc: any + const id1 = "id1"; + const id2 = "id2"; + let count = 0; + let username = ""; + let lastDoc: any; const { promise, cancel } = db.i_users.watch(id1, (doc) => { - count++ - lastDoc = doc + count++; + lastDoc = doc; if (doc?.value.username) { - username = doc.value.username + username = doc.value.username; } - }) + }); - await db.i_users.set(id2, mockUser1) - await sleep(500) - await db.i_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - await db.i_users.update(id2, mockUser3) - await sleep(500) - await db.i_users.delete(id2) - await sleep(500) 
+ await db.i_users.set(id2, mockUser1); + await sleep(500); + await db.i_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + await db.i_users.update(id2, mockUser3); + await sleep(500); + await db.i_users.delete(id2); + await sleep(500); // Account for initial invocation - assert(count === 1) - assert(username === "") - assert(lastDoc === null) + assert(count === 1); + assert(username === ""); + assert(lastDoc === null); - await cancel() - await promise - }) - }) -}) + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/indexable_collection/watchMany.test.ts b/tests/indexable_collection/watchMany.test.ts index 90bbc47..5f2e7dc 100644 --- a/tests/indexable_collection/watchMany.test.ts +++ b/tests/indexable_collection/watchMany.test.ts @@ -1,120 +1,120 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { generateUsers, sleep, useDb } from "../utils.ts" -import type { User } from "../models.ts" -import type { Document } from "../../mod.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { generateUsers, sleep, useDb } from "../utils.ts"; +import type { User } from "../models.ts"; +import type { Document } from "../../mod.ts"; Deno.test("indexable_collection - watchMany", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - const id3 = "id3" - const generatedUser = generateUsers(1)[0] - const snapshots: (Document | null)[][] = [] + const id1 = "id1"; + const id2 = "id2"; + const id3 = "id3"; + const generatedUser = generateUsers(1)[0]; + const snapshots: (Document | null)[][] = []; - await db.i_users.set(id3, generatedUser) + await db.i_users.set(id3, generatedUser); - await sleep(500) + await sleep(500); const { promise, cancel } = db.i_users.watchMany( [id1, id2, id3], (docs) => { - snapshots.push(docs) + snapshots.push(docs); }, - ) + ); - const cr1 = await db.i_users.set(id1, mockUser1) - await sleep(500) - await db.i_users.delete(id1) - await sleep(500) - const cr2 = await db.i_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - const cr3 = await db.i_users.update(id3, mockUser3) - await sleep(500) + const cr1 = await db.i_users.set(id1, mockUser1); + await sleep(500); + await db.i_users.delete(id1); + await sleep(500); + const cr2 = await db.i_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + const cr3 = await db.i_users.update(id3, mockUser3); + await sleep(500); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1?.value.username === mockUser1.username && doc2 === null && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? 
null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2 === null && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser3.username - })) + doc3?.value.username === mockUser3.username; + })); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id1" - const id3 = "id1" - const id4 = "id4" - let count = 0 - let lastDocs: any[] = [] + const id1 = "id1"; + const id2 = "id1"; + const id3 = "id1"; + const id4 = "id4"; + let count = 0; + let lastDocs: any[] = []; const { promise, cancel } = db.i_users.watchMany( [id1, id2, id3], (docs) => { - count++ - lastDocs = docs + count++; + lastDocs = docs; }, - ) - - await db.i_users.set(id4, mockUser1) - await sleep(500) - await db.i_users.set(id4, mockUser2, { overwrite: true }) - await sleep(500) - await db.i_users.update(id4, mockUser3) - await sleep(500) - await db.i_users.delete(id4) - await sleep(500) - - assert(count === 1) - assert(lastDocs[0] === null) - assert(lastDocs[1] === null) - assert(lastDocs[2] === null) - - await cancel() - await promise - }) - }) -}) + ); + + await db.i_users.set(id4, mockUser1); + await sleep(500); + await db.i_users.set(id4, mockUser2, { overwrite: true }); + await sleep(500); + await db.i_users.update(id4, mockUser3); + await sleep(500); + await db.i_users.delete(id4); + await sleep(500); + + assert(count === 1); + assert(lastDocs[0] === null); + assert(lastDocs[1] === null); + assert(lastDocs[2] === null); + + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/mocks.ts b/tests/mocks.ts index 12972a5..f1babca 100644 --- a/tests/mocks.ts +++ b/tests/mocks.ts @@ -1,4 +1,4 @@ -import type { User } from "./models.ts" +import type { User } from "./models.ts"; export const mockUser1: User = { username: "oliver", @@ -9,7 +9,7 @@ export const mockUser1: User = { street: "Olav Kyrres gate", houseNr: 420, }, -} +}; export const mockUser2: User = { username: "elias", @@ -20,7 +20,7 @@ export const mockUser2: User = { street: "Karl Johans gate", houseNr: 420, }, -} +}; export const mockUser3: User = { username: "anders", @@ -30,7 +30,7 @@ export const mockUser3: User = { city: "Dublin", houseNr: 10, }, -} +}; export const 
mockUserInvalid = { username: 100, @@ -39,7 +39,7 @@ export const mockUserInvalid = { street: "Karl Johans gate", houseNr: "420", }, -} as unknown as User +} as unknown as User; export const mockUsersWithAlteredAge: User[] = [ { @@ -54,4 +54,4 @@ export const mockUsersWithAlteredAge: User[] = [ ...mockUser3, age: 20, }, -] +]; diff --git a/tests/models.ts b/tests/models.ts index 0e823f1..8f9f618 100644 --- a/tests/models.ts +++ b/tests/models.ts @@ -1,31 +1,31 @@ -import { model } from "../mod.ts" -import { z } from "./test.deps.ts" +import { model } from "../mod.ts"; +import { z } from "./test.deps.ts"; -export type Address = z.infer +export type Address = z.infer; -export type User = z.infer +export type User = z.infer; export type OutputUser = { - name: string - decadeAge: number - addressStr: string -} + name: string; + decadeAge: number; + addressStr: string; +}; export const AddressSchema = z.object({ country: z.string(), city: z.string(), street: z.string().optional(), houseNr: z.number().nullable(), -}) +}); export const UserSchema = z.object({ username: z.string(), age: z.number(), address: AddressSchema, -}) +}); export const TransformUserModel = model((user) => ({ name: user.username, decadeAge: user.age / 10, addressStr: `${user.address.city}, ${user.address.country}`, -})) +})); diff --git a/tests/serialized_collection/add.test.ts b/tests/serialized_collection/add.test.ts index 5b83f9f..9170edf 100644 --- a/tests/serialized_collection/add.test.ts +++ b/tests/serialized_collection/add.test.ts @@ -1,45 +1,45 @@ -import { assert } from "../test.deps.ts" -import { mockUserInvalid } from "../mocks.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUserInvalid } from "../mocks.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - add", async (t) => { await t.step("Should add new document entry to collection", async () => { await useDb(async (db) => { - const [user] = generateLargeUsers(1) + const [user] = generateLargeUsers(1); - const cr = await db.s_users.add(user) - assert(cr.ok) + const cr = await db.s_users.add(user); + assert(cr.ok); - const doc = await db.s_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === user.username) - }) - }) + const doc = await db.s_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === user.username); + }); + }); await t.step( "Should successfully parse and add new document entry to collection", async () => { await useDb(async (db) => { - const [user] = generateLargeUsers(1) + const [user] = generateLargeUsers(1); - const cr = await db.zs_users.add(user) - assert(cr.ok) + const cr = await db.zs_users.add(user); + assert(cr.ok); - const doc = await db.zs_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === user.username) - }) + const doc = await db.zs_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === user.username); + }); }, - ) + ); await t.step( "Should fail parse and add new document entry to collection", async () => { await useDb(async (db) => { - let assertion = false - await db.zs_users.add(mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) + let assertion = false; + await db.zs_users.add(mockUserInvalid).catch(() => assertion = true); + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/addMany.test.ts b/tests/serialized_collection/addMany.test.ts index 09df221..3662fa8 100644 --- 
a/tests/serialized_collection/addMany.test.ts +++ b/tests/serialized_collection/addMany.test.ts @@ -1,58 +1,58 @@ -import { assert } from "../test.deps.ts" -import { generateInvalidUsers, generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateInvalidUsers, generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - addMany", async (t) => { await t.step( "Should successfully add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const { result: docs } = await db.s_users.getMany() + const { result: docs } = await db.s_users.getMany(); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should successfully parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.zs_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.zs_users.addMany(users); + assert(cr.ok); - const { result: docs } = await db.zs_users.getMany() + const { result: docs } = await db.zs_users.getMany(); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should fail to parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateInvalidUsers(1_000) - let assertion = false + const users = generateInvalidUsers(1_000); + let assertion = false; - await db.zs_users.addMany(users).catch(() => assertion = true) + await db.zs_users.addMany(users).catch(() => assertion = true); - assert(assertion) - }) + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/count.test.ts b/tests/serialized_collection/count.test.ts index b0f12a6..da6e321 100644 --- a/tests/serialized_collection/count.test.ts +++ b/tests/serialized_collection/count.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - count", async (t) => { await t.step( "Should correctly count total number of documents in the collection", async () => { await useDb(async (db) => { - const count1 = await db.s_users.count() - assert(count1 === 0) + const count1 = await db.s_users.count(); + assert(count1 === 0); - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const count2 = await db.s_users.count() - assert(count2 === users.length) - }) + const count2 = await db.s_users.count(); + assert(count2 === users.length); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/delete.test.ts b/tests/serialized_collection/delete.test.ts index 4747419..c6e1a39 100644 --- a/tests/serialized_collection/delete.test.ts +++ b/tests/serialized_collection/delete.test.ts @@ 
-1,47 +1,47 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - delete", async (t) => { await t.step( "Should successfully delete a document from the collection", async () => { await useDb(async (db) => { - const cr = await db.s_users.add(mockUser1) - const count1 = await db.s_users.count() + const cr = await db.s_users.add(mockUser1); + const count1 = await db.s_users.count(); - assert(cr.ok) - assert(count1 === 1) + assert(cr.ok); + assert(count1 === 1); - await db.s_users.delete(cr.id) + await db.s_users.delete(cr.id); - const count2 = await db.s_users.count() - const doc = await db.s_users.find(cr.id) + const count2 = await db.s_users.count(); + const doc = await db.s_users.find(cr.id); - assert(count2 === 0) - assert(doc === null) - }) + assert(count2 === 0); + assert(doc === null); + }); }, - ) + ); await t.step( "Should successfully delete 1000 documents from the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); - assert(cr.ok) - assert(count1 === users.length) + assert(cr.ok); + assert(count1 === users.length); - const { result: ids } = await db.s_users.map((doc) => doc.id) + const { result: ids } = await db.s_users.map((doc) => doc.id); - await db.s_users.delete(...ids) + await db.s_users.delete(...ids); - const count2 = await db.s_users.count() - assert(count2 === 0) - }) + const count2 = await db.s_users.count(); + assert(count2 === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/deleteMany.test.ts b/tests/serialized_collection/deleteMany.test.ts index 68f052e..bb3df78 100644 --- a/tests/serialized_collection/deleteMany.test.ts +++ b/tests/serialized_collection/deleteMany.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - deleteMany", async (t) => { await t.step("Should delete all documents from the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const count1 = await db.s_users.count() - assert(count1 === users.length) + const count1 = await db.s_users.count(); + assert(count1 === users.length); - await db.s_users.deleteMany() + await db.s_users.deleteMany(); - const count2 = await db.s_users.count() - assert(count2 === 0) - }) - }) -}) + const count2 = await db.s_users.count(); + assert(count2 === 0); + }); + }); +}); diff --git a/tests/serialized_collection/enqueue.test.ts b/tests/serialized_collection/enqueue.test.ts index e995cb9..e22247e 100644 --- a/tests/serialized_collection/enqueue.test.ts +++ b/tests/serialized_collection/enqueue.test.ts @@ -4,100 +4,100 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } 
from "../test.deps.ts" -import type { User } from "../models.ts" -import { createResolver, useDb, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { createResolver, useDb, useKv } from "../utils.ts"; Deno.test("serialized_collection - enqueue", async (t) => { await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" + const data = "data"; + const undeliveredId = "undelivered"; const db = kvdex(kv, { s_users: collection(model(), { serialize: "json" }), - }) + }); - const sleeper = createResolver() - const handlerId = createHandlerId(db.s_users._keys.base, undefined) - let assertion = false + const sleeper = createResolver(); + const handlerId = createHandlerId(db.s_users._keys.base, undefined); + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - sleeper.resolve() - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + sleeper.resolve(); + }); await db.s_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.s_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.s_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should enqueue message in correct topic", async () => { await useDb(async (db) => { - const data = "data" - const undeliveredId = "undelivered" - const topic = "topic" + const data = "data"; + const undeliveredId = "undelivered"; + const topic = "topic"; - const sleeper = createResolver() - let assertion1 = false - let assertion2 = true + const sleeper = createResolver(); + let assertion1 = false; + let assertion2 = true; const l1 = db.s_users.listenQueue(() => { - assertion1 = true - sleeper.resolve() - }, { topic }) + assertion1 = true; + sleeper.resolve(); + }, { topic }); - const l2 = db.s_users.listenQueue(() => assertion2 = false) + const l2 = db.s_users.listenQueue(() => assertion2 = false); await db.s_users.enqueue("data", { idsIfUndelivered: [undeliveredId], topic, - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.s_users.findUndelivered(undeliveredId) - assert(assertion1 || typeof undelivered?.value === typeof data) - assert(assertion2) + const undelivered = await db.s_users.findUndelivered(undeliveredId); + assert(assertion1 || typeof undelivered?.value === typeof data); + assert(assertion2); - return async () => await Promise.all([l1, l2]) - }) - }) + return async () => await Promise.all([l1, l2]); + }); + }); await t.step("Should enqueue message with undefined data", async () => { await useDb(async (db) => { - const data = undefined - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = undefined; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); - let assertion = false + let assertion = false; const listener = db.s_users.listenQueue((msg) => { - assertion = msg === data - sleeper.resolve() - }) + assertion = msg 
=== data; + sleeper.resolve(); + }); await db.s_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.s_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.s_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/serialized_collection/find.test.ts b/tests/serialized_collection/find.test.ts index 9e0e948..d85b5af 100644 --- a/tests/serialized_collection/find.test.ts +++ b/tests/serialized_collection/find.test.ts @@ -1,25 +1,25 @@ -import { assert } from "../test.deps.ts" -import { mockUser1 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_collection - find", async (t) => { await t.step("Should find document by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.s_users.set(id, mockUser1) - assert(cr.ok) + const cr = await db.s_users.set(id, mockUser1); + assert(cr.ok); - const doc = await db.s_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.s_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step("Should not find document by non-existing id", async () => { await useDb(async (db) => { - const doc = await db.s_users.find("123") - assert(doc === null) - }) - }) -}) + const doc = await db.s_users.find("123"); + assert(doc === null); + }); + }); +}); diff --git a/tests/serialized_collection/findMany.test.ts b/tests/serialized_collection/findMany.test.ts index 41f24d4..9c0fe24 100644 --- a/tests/serialized_collection/findMany.test.ts +++ b/tests/serialized_collection/findMany.test.ts @@ -1,32 +1,32 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - findMany", async (t) => { await t.step("Should find all documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const { result: docs } = await db.s_users.getMany() + const { result: docs } = await db.s_users.getMany(); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should not find any documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const docs = await db.s_users.findMany(["", "", ""]) - assert(docs.length === 0) - }) - }) -}) + const docs = await db.s_users.findMany(["", "", ""]); + assert(docs.length === 0); + }); + }); +}); diff --git a/tests/serialized_collection/forEach.test.ts 
b/tests/serialized_collection/forEach.test.ts index 2c4733f..8af7a05 100644 --- a/tests/serialized_collection/forEach.test.ts +++ b/tests/serialized_collection/forEach.test.ts @@ -1,27 +1,27 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - forEach", async (t) => { await t.step( "Should run callback function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const docs: Document[] = [] - await db.s_users.forEach((doc) => docs.push(doc)) + const docs: Document[] = []; + await db.s_users.forEach((doc) => docs.push(doc)); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/getMany.test.ts b/tests/serialized_collection/getMany.test.ts index 1a01c08..0696af7 100644 --- a/tests/serialized_collection/getMany.test.ts +++ b/tests/serialized_collection/getMany.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - getMany", async (t) => { await t.step("Should get all documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const { result } = await db.s_users.getMany() - assert(result.length === users.length) + const { result } = await db.s_users.getMany(); + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) - }) -}) + ); + }); + }); +}); diff --git a/tests/serialized_collection/getOne.test.ts b/tests/serialized_collection/getOne.test.ts index df29cba..4664886 100644 --- a/tests/serialized_collection/getOne.test.ts +++ b/tests/serialized_collection/getOne.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("serialized_collection - getOne", async (t) => { await t.step("Should get only one document", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const doc = await db.s_users.getOne() - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) -}) + 
const doc = await db.s_users.getOne(); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); +}); diff --git a/tests/serialized_collection/history.test.ts b/tests/serialized_collection/history.test.ts index 2a0c535..42f567e 100644 --- a/tests/serialized_collection/history.test.ts +++ b/tests/serialized_collection/history.test.ts @@ -1,8 +1,8 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { sleep, useKv } from "../utils.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { sleep, useKv } from "../utils.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_collection - history", async (t) => { await t.step( @@ -14,27 +14,27 @@ Deno.test("serialized_collection - history", async (t) => { history: true, serialize: "json", }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); + + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts in correct order after deleting", @@ -45,35 +45,35 @@ Deno.test("serialized_collection - history", async (t) => { history: true, serialize: "json", }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - - const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "write") - assert(h4.value.username === mockUser3.username) - assert(h4.timestamp.valueOf() <= 
h5.timestamp.valueOf()) - assert(h5.type === "delete") - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + + const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "write"); + assert(h4.value.username === mockUser3.username); + assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()); + assert(h5.type === "delete"); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts and updates in correct order", @@ -84,27 +84,27 @@ Deno.test("serialized_collection - history", async (t) => { history: true, serialize: "json", }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.update(id, mockUser2) - await sleep(10) - await db.users.update(id, mockUser3) - - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + }); + + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.update(id, mockUser2); + await sleep(10); + await db.users.update(id, mockUser3); + + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist version history of insert and delete by deleteMany()", @@ -115,30 +115,30 @@ Deno.test("serialized_collection - history", async (t) => { history: true, serialize: "json", }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.deleteMany() - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.deleteMany({ filter: () => true }) - - const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "delete") - }) + }); + + const id = "id"; + await db.users.set(id, 
mockUser1, { overwrite: true }); + await sleep(10); + await db.users.deleteMany(); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.deleteMany({ filter: () => true }); + + const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "delete"); + }); }, - ) + ); await t.step( "Should not find history", @@ -146,19 +146,19 @@ Deno.test("serialized_collection - history", async (t) => { await useKv(async (kv) => { const db = kvdex(kv, { users: collection(model()), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.update(id, mockUser2) - await db.users.delete(id) - await db.users.deleteMany() + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.update(id, mockUser2); + await db.users.delete(id); + await db.users.deleteMany(); - const { result: history } = await db.users.findHistory(id) - assert(history.length === 0) - }) + const { result: history } = await db.users.findHistory(id); + assert(history.length === 0); + }); }, - ) + ); await t.step("Should find filtered history", async () => { await useKv(async (kv) => { @@ -167,39 +167,39 @@ Deno.test("serialized_collection - history", async (t) => { history: true, serialize: "json", }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.delete(id) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.delete(id); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.update(id, mockUser3); const { result: history1 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "delete", - }) + }); const { result: history2 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "write" && entry.value.age === mockUser1.age, - }) + }); - assert(history1.length === 1) - assert(history2.length === 2) + assert(history1.length === 1); + assert(history2.length === 2); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser1.username ), - ) + ); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser2.username ), - ) - }) - }) + ); + }); + }); await t.step("Should delete all document history", async () => { await useKv(async (kv) => { @@ -208,27 +208,27 @@ Deno.test("serialized_collection - history", async (t) => { serialize: "json", history: true, }), - }) - - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.set(id, mockUser3, { overwrite: true }) - const cr = await db.users.add(mockUser1) - - assert(cr.ok) - - const { result: history1_1 } = await db.users.findHistory(id) - const { result: history1_2 } = await db.users.findHistory(cr.id) - assert(history1_1.length === 3) - assert(history1_2.length === 1) - - await db.users.deleteHistory(id) - - const { result: history2_1 } = await db.users.findHistory(id) - const { result: 
history2_2 } = await db.users.findHistory(cr.id)
-      assert(history2_1.length === 0)
-      assert(history2_2.length === 1)
-    })
-  })
-})
+      });
+
+      const id = "id";
+      await db.users.set(id, mockUser1, { overwrite: true });
+      await db.users.set(id, mockUser2, { overwrite: true });
+      await db.users.set(id, mockUser3, { overwrite: true });
+      const cr = await db.users.add(mockUser1);
+
+      assert(cr.ok);
+
+      const { result: history1_1 } = await db.users.findHistory(id);
+      const { result: history1_2 } = await db.users.findHistory(cr.id);
+      assert(history1_1.length === 3);
+      assert(history1_2.length === 1);
+
+      await db.users.deleteHistory(id);
+
+      const { result: history2_1 } = await db.users.findHistory(id);
+      const { result: history2_2 } = await db.users.findHistory(cr.id);
+      assert(history2_1.length === 0);
+      assert(history2_2.length === 1);
+    });
+  });
+});
diff --git a/tests/serialized_collection/listenQueue.test.ts b/tests/serialized_collection/listenQueue.test.ts
index 0672dc2..366503c 100644
--- a/tests/serialized_collection/listenQueue.test.ts
+++ b/tests/serialized_collection/listenQueue.test.ts
@@ -4,41 +4,41 @@ import {
   type KvValue,
   model,
   type QueueMessage,
-} from "../../mod.ts"
+} from "../../mod.ts";
 import {
   KVDEX_KEY_PREFIX,
   UNDELIVERED_KEY_PREFIX,
-} from "../../src/constants.ts"
-import { createHandlerId, extendKey } from "../../src/utils.ts"
-import { assert } from "../test.deps.ts"
-import type { User } from "../models.ts"
-import { createResolver, sleep, useKv } from "../utils.ts"
+} from "../../src/constants.ts";
+import { createHandlerId, extendKey } from "../../src/utils.ts";
+import { assert } from "../test.deps.ts";
+import type { User } from "../models.ts";
+import { createResolver, sleep, useKv } from "../utils.ts";

 Deno.test("serialized_collection - listenQueue", async (t) => {
   await t.step("Should receive message with string data", async () => {
     await useKv(async (kv) => {
-      const data = "data"
-      const undeliveredId = "id"
+      const data = "data";
+      const undeliveredId = "id";

       const db = kvdex(kv, {
         s_users: collection(model<User>()),
-      })
+      });

-      const sleeper = createResolver()
-      let assertion = false
+      const sleeper = createResolver();
+      let assertion = false;

       const listener = db.s_users.listenQueue((msgData) => {
-        assertion = msgData === data
-        sleeper.resolve()
-      })
+        assertion = msgData === data;
+        sleeper.resolve();
+      });

-      const handlerId = createHandlerId(db.s_users._keys.base, undefined)
+      const handlerId = createHandlerId(db.s_users._keys.base, undefined);

       const msg: QueueMessage<KvValue> = {
         __is_undefined__: false,
         __handlerId__: handlerId,
         __data__: data,
-      }
+      };

       await kv.enqueue(msg, {
         keysIfUndelivered: [
@@ -48,36 +48,36 @@ Deno.test("serialized_collection - listenQueue", async (t) => {
            undeliveredId,
          ),
        ],
-      })
+      });

-      await sleeper.promise
+      await sleeper.promise;

-      const undelivered = await db.s_users.findUndelivered(undeliveredId)
-      assert(assertion || typeof undelivered?.value === typeof data)
+      const undelivered = await db.s_users.findUndelivered(undeliveredId);
+      assert(assertion || typeof undelivered?.value === typeof data);

-      return async () => await listener
-    })
-  })
+      return async () => await listener;
+    });
+  });

   await t.step("Should not receive db queue message", async () => {
     await useKv(async (kv) => {
       const db = kvdex(kv, {
         l_users: collection(model<User>()),
-      })
+      });

-      let assertion = true
+      let assertion = true;

       const listener = db.l_users.listenQueue(() => {
-        assertion = false
-      })
+        assertion = false;
+      });

-      await db.enqueue("data")
+      await
db.enqueue("data"); - await sleep(500) + await sleep(500); - assert(assertion) + assert(assertion); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/serialized_collection/map.test.ts b/tests/serialized_collection/map.test.ts index 0507a33..46e06cf 100644 --- a/tests/serialized_collection/map.test.ts +++ b/tests/serialized_collection/map.test.ts @@ -1,24 +1,24 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_collection - map", async (t) => { await t.step( "Should run callback mapper function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const { result } = await db.s_users.map((doc) => doc.value.username) + const { result } = await db.s_users.map((doc) => doc.value.username); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((username) => username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/properties.test.ts b/tests/serialized_collection/properties.test.ts index eb4296b..456c802 100644 --- a/tests/serialized_collection/properties.test.ts +++ b/tests/serialized_collection/properties.test.ts @@ -1,30 +1,30 @@ -import { collection, type Document, kvdex, model } from "../../mod.ts" +import { collection, type Document, kvdex, model } from "../../mod.ts"; import { ID_KEY_PREFIX, KVDEX_KEY_PREFIX, SEGMENT_KEY_PREFIX, -} from "../../src/constants.ts" -import { extendKey, keyEq } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, useDb, useKv } from "../utils.ts" -import { mockUser3 } from "../mocks.ts" -import { sleep } from "../utils.ts" +} from "../../src/constants.ts"; +import { extendKey, keyEq } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { generateLargeUsers, useDb, useKv } from "../utils.ts"; +import { mockUser3 } from "../mocks.ts"; +import { sleep } from "../utils.ts"; Deno.test("serialized_collection - properties", async (t) => { await t.step("Keys should have the correct prefixes", async () => { await useDb((db) => { - const baseKey = db.s_users._keys.base - const idKey = db.s_users._keys.id - const segmentKey = db.s_users._keys.segment - const prefix = extendKey([KVDEX_KEY_PREFIX], "s_users") + const baseKey = db.s_users._keys.base; + const idKey = db.s_users._keys.id; + const segmentKey = db.s_users._keys.segment; + const prefix = extendKey([KVDEX_KEY_PREFIX], "s_users"); - assert(keyEq(baseKey, prefix)) - assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))) - assert(keyEq(segmentKey, extendKey(prefix, SEGMENT_KEY_PREFIX))) - }) - }) + assert(keyEq(baseKey, prefix)); + assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))); + assert(keyEq(segmentKey, extendKey(prefix, SEGMENT_KEY_PREFIX))); + }); + }); await t.step("Should generate ids with custom id generator", async () => { await useKv((kv) => { @@ 
-37,254 +37,254 @@ Deno.test("serialized_collection - properties", async (t) => { serialize: "json", idGenerator: (data) => data.username, }), - }) + }); - const id1 = db.users1._idGenerator(mockUser1) - const id2 = db.users2._idGenerator(mockUser1) + const id1 = db.users1._idGenerator(mockUser1); + const id2 = db.users2._idGenerator(mockUser1); - assert(typeof id1 === "number") - assert(id2 === mockUser1.username) - }) - }) + assert(typeof id1 === "number"); + assert(id2 === mockUser1.username); + }); + }); await t.step("Should select using cursor pagination", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - let cursor: string | undefined = undefined + const selected: Document[] = []; + let cursor: string | undefined = undefined; do { const query = await db.s_users.getMany({ cursor, limit: users.length / 10, - }) + }); - selected.push(...query.result) - cursor = query.cursor - } while (cursor) + selected.push(...query.result); + cursor = query.cursor; + } while (cursor); assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select using offset pagination", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - const limit = 50 + const selected: Document[] = []; + const limit = 50; for (let offset = 0; offset < users.length; offset += limit) { - const { result } = await db.s_users.getMany({ offset, limit }) - selected.push(...result) - assert(result.length === 50) + const { result } = await db.s_users.getMany({ offset, limit }); + selected.push(...result); + assert(result.length === 50); } assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select filtered", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const sliced = users.slice(5, 7) + const sliced = users.slice(5, 7); const { result } = await db.s_users.getMany({ filter: (doc) => sliced.map((user) => user.username).includes( doc.value.username, ), - }) + }); - assert(result.length === sliced.length) + assert(result.length === sliced.length); assert( result.every((doc) => sliced.some((user) => user.username === doc.value.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select in reverse", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const query1 = await db.s_users.getMany() - const 
query2 = await db.s_users.getMany({ reverse: true }) + const query1 = await db.s_users.getMany(); + const query2 = await db.s_users.getMany({ reverse: true }); assert( JSON.stringify(query1.result) === JSON.stringify(query2.result.reverse()), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.s_users.getMany() + const query1 = await db.s_users.getMany(); const query2 = await db.s_users.getMany({ startId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(index).length) + assert(query2.result.length === query1.result.slice(index).length); assert( query2.result.every((doc1) => query1.result.slice(index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select until end id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.s_users.getMany() + const query1 = await db.s_users.getMany(); const query2 = await db.s_users.getMany({ endId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(0, index).length) + assert(query2.result.length === query1.result.slice(0, index).length); assert( query2.result.every((doc1) => query1.result.slice(0, index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id to end id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.s_users.addMany(users) - const count1 = await db.s_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.s_users.addMany(users); + const count1 = await db.s_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index1 = 5 - const index2 = 7 + const index1 = 5; + const index2 = 7; - const query1 = await db.s_users.getMany() + const query1 = await db.s_users.getMany(); const query2 = await db.s_users.getMany({ startId: query1.result.at(index1)?.id, endId: query1.result.at(index2)?.id, - }) + }); assert( query2.result.length === query1.result.slice(index1, index2).length, - ) + ); assert( query2.result.every((doc1) => query1.result.slice(index1, index2).some((doc2) => doc1.id === doc2.id ) ), - ) - }) - }) + ); + }); + }); await t.step("Should select limited by database reads", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) - await sleep(10) - const cr3 = await db.s_users.add(mockUser3) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); + await sleep(10); + const cr3 = await 
db.s_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.s_users.getMany({ limit: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.every((doc) => doc.value.username === mockUser2.username)) - }) - }) + assert(result.every((doc) => doc.value.username === mockUser2.username)); + }); + }); await t.step("Should select limited by result count", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) - await sleep(10) - const cr3 = await db.s_users.add(mockUser3) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); + await sleep(10); + const cr3 = await db.s_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.s_users.getMany({ take: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.length === 2) - assert(result.some((doc) => doc.value.username === mockUser2.username)) - assert(result.some((doc) => doc.value.username === mockUser3.username)) - }) - }) + assert(result.length === 2); + assert(result.some((doc) => doc.value.username === mockUser2.username)); + assert(result.some((doc) => doc.value.username === mockUser3.username)); + }); + }); await t.step("Should correctly infer type of document", async () => { await useDb(async (db) => { - const doc = await db.s_users.find("") + const doc = await db.s_users.find(""); if (doc) { - doc.value.age.valueOf() + doc.value.age.valueOf(); } - }) - }) + }); + }); await t.step( "Should correctly infer insert and output of asymmetric model", async () => { await useDb(async (db) => { - const cr = await db.as_users.add(mockUser1) - assert(cr.ok) - - const doc = await db.as_users.find(cr.id) - assert(doc !== null) - assert(typeof doc.value.addressStr === "string") - assert(typeof doc.value.decadeAge === "number") - assert(typeof doc.value.name === "string") - }) + const cr = await db.as_users.add(mockUser1); + assert(cr.ok); + + const doc = await db.as_users.find(cr.id); + assert(doc !== null); + assert(typeof doc.value.addressStr === "string"); + assert(typeof doc.value.decadeAge === "number"); + assert(typeof doc.value.name === "string"); + }); }, - ) + ); await t.step("Should successfully generate id asynchronously", async () => { await useKv(async (kv) => { @@ -295,16 +295,16 @@ Deno.test("serialized_collection - properties", async (t) => { const buffer = await crypto.subtle.digest( "SHA-256", new ArrayBuffer(user.age), - ) - return Math.random() * buffer.byteLength + ); + return Math.random() * buffer.byteLength; }, }), - }) + }); - const cr1 = await db.test.add(mockUser1) + const cr1 = await db.test.add(mockUser1); - assert(cr1.ok) - assert(typeof cr1.id === "number") - }) - }) -}) + assert(cr1.ok); + assert(typeof cr1.id === "number"); + }); + }); +}); diff --git a/tests/serialized_collection/set.test.ts b/tests/serialized_collection/set.test.ts index f29f89b..50d4571 100644 --- a/tests/serialized_collection/set.test.ts +++ b/tests/serialized_collection/set.test.ts @@ -1,66 +1,68 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } 
from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_collection - set", async (t) => { await t.step("Should set new document entry in collection", async () => { await useDb(async (db) => { - const cr = await db.s_users.set("id", mockUser1) - assert(cr.ok) + const cr = await db.s_users.set("id", mockUser1); + assert(cr.ok); - const doc = await db.s_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) + const doc = await db.s_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); await t.step( "Should not set new document entry in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.set("id", mockUser1) - assert(cr1.ok) + const cr1 = await db.s_users.set("id", mockUser1); + assert(cr1.ok); - const cr2 = await db.s_users.set("id", mockUser2) - assert(!cr2.ok) + const cr2 = await db.s_users.set("id", mockUser2); + assert(!cr2.ok); - const doc = await db.s_users.find("id") - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + const doc = await db.s_users.find("id"); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) + ); await t.step( "Should overwrite document in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.set("id", mockUser1) - assert(cr1.ok) + const cr1 = await db.s_users.set("id", mockUser1); + assert(cr1.ok); - const cr2 = await db.s_users.set("id", mockUser2, { overwrite: true }) - assert(cr2.ok) + const cr2 = await db.s_users.set("id", mockUser2, { overwrite: true }); + assert(cr2.ok); - const doc = await db.s_users.find("id") - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) + const doc = await db.s_users.find("id"); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); }, - ) + ); await t.step("Should successfully parse and set document", async () => { await useDb(async (db) => { - let assertion = true - await db.zs_users.set("id", mockUser1).catch(() => assertion = false) - assert(assertion) - }) - }) + let assertion = true; + await db.zs_users.set("id", mockUser1).catch(() => assertion = false); + assert(assertion); + }); + }); await t.step("Should fail to parse and set document", async () => { await useDb(async (db) => { - let assertion = false - await db.zs_users.set("id", mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) - }) -}) + let assertion = false; + await db.zs_users.set("id", mockUserInvalid).catch(() => + assertion = true + ); + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_collection/types.test.ts b/tests/serialized_collection/types.test.ts index 97fe782..0bbb78e 100644 --- a/tests/serialized_collection/types.test.ts +++ b/tests/serialized_collection/types.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { useKv } from "../utils.ts" -import { VALUES } from "../values.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { useKv } from "../utils.ts"; +import { VALUES } from "../values.ts"; Deno.test("serialized_collection - types", async (t) => { await t.step( @@ -13,19 +13,19 @@ Deno.test("serialized_collection - types", async (t) => { val, i, ) => [i, collection(model(), { serialize: "v8" })]), - ) + ); 
- const db = kvdex(kv, schema) + const db = kvdex(kv, schema); - const crs = await Promise.all(VALUES.map((val, i) => db[i].add(val))) - assert(crs.every((cr) => cr.ok)) + const crs = await Promise.all(VALUES.map((val, i) => db[i].add(val))); + assert(crs.every((cr) => cr.ok)); await Promise.all( VALUES.map((_, i) => db[i].forEach((doc) => assertEquals(doc.value, VALUES[i])) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/update.test.ts b/tests/serialized_collection/update.test.ts index a86a06d..ba87873 100644 --- a/tests/serialized_collection/update.test.ts +++ b/tests/serialized_collection/update.test.ts @@ -1,15 +1,15 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import { useDb, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import { useDb, useKv } from "../utils.ts"; Deno.test("serialized_collection - update", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.s_users.add(mockUser1) - assert(cr.ok) + const cr = await db.s_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -17,35 +17,35 @@ Deno.test("serialized_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.s_users.update(cr.id, updateData, { strategy: "merge-shallow", - }) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - - const doc = await db.s_users.find(cr.id) - - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) + }); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + + const doc = await db.s_users.find(cr.id); + + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.s_users.add(mockUser1) - assert(cr.ok) + const cr = await db.s_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -53,56 +53,56 @@ Deno.test("serialized_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.s_users.update(cr.id, updateData, { strategy: "merge", - }) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - - const doc = await db.s_users.find(cr.id) - - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country 
=== updateData.address.country)
-        assert(doc.value.address.city === updateData.address.city)
-        assert(doc.value.address.houseNr === updateData.address.houseNr)
-        assert(doc.value.address.street === mockUser1.address.street)
-      })
+        });
+
+        assert(updateCr.ok);
+        assert(updateCr.id === cr.id);
+        assert(updateCr.versionstamp !== cr.versionstamp);
+
+        const doc = await db.s_users.find(cr.id);
+
+        assert(doc !== null);
+        assert(doc.value.username === mockUser1.username);
+        assert(doc.value.age === mockUser1.age);
+        assert(doc.value.address.country === updateData.address.country);
+        assert(doc.value.address.city === updateData.address.city);
+        assert(doc.value.address.houseNr === updateData.address.houseNr);
+        assert(doc.value.address.street === mockUser1.address.street);
+      });
     },
-  )
+  );

   await t.step(
     "Should update document of KvObject type using replace",
     async () => {
       await useDb(async (db) => {
-        const cr = await db.s_users.add(mockUser1)
-        assert(cr.ok)
+        const cr = await db.s_users.add(mockUser1);
+        assert(cr.ok);

         const updateCr = await db.s_users.update(cr.id, mockUser2, {
           strategy: "replace",
-        })
-
-        assert(updateCr.ok)
-        assert(updateCr.id === cr.id)
-        assert(updateCr.versionstamp !== cr.versionstamp)
-
-        const doc = await db.s_users.find(cr.id)
-
-        assert(doc !== null)
-        assert(doc.value.username === mockUser2.username)
-        assert(doc.value.age === mockUser2.age)
-        assert(doc.value.address.country === mockUser2.address.country)
-        assert(doc.value.address.city === mockUser2.address.city)
-        assert(doc.value.address.houseNr === mockUser2.address.houseNr)
-        assert(doc.value.address.street === mockUser2.address.street)
-      })
+        });
+
+        assert(updateCr.ok);
+        assert(updateCr.id === cr.id);
+        assert(updateCr.versionstamp !== cr.versionstamp);
+
+        const doc = await db.s_users.find(cr.id);
+
+        assert(doc !== null);
+        assert(doc.value.username === mockUser2.username);
+        assert(doc.value.age === mockUser2.age);
+        assert(doc.value.address.country === mockUser2.address.country);
+        assert(doc.value.address.city === mockUser2.address.city);
+        assert(doc.value.address.houseNr === mockUser2.address.houseNr);
+        assert(doc.value.address.street === mockUser2.address.street);
+      });
     },
-  )
+  );

   await t.step(
     "Should update documents of type Array, Set and Map using merge",
@@ -112,57 +112,57 @@ Deno.test("serialized_collection - update", async (t) => {
          arrays: collection(model<number[]>()),
          sets: collection(model<Set<number>>()),
          maps: collection(model<Map<string, number>>()),
-        })
+        });

-        const val1 = [1, 2, 4]
-        const setEntries = [1, 2, 4]
-        const val2 = new Set(setEntries)
-        const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const
-        const val3 = new Map(mapEntries)
+        const val1 = [1, 2, 4];
+        const setEntries = [1, 2, 4];
+        const val2 = new Set(setEntries);
+        const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const;
+        const val3 = new Map(mapEntries);

-        const cr1 = await db.arrays.add(val1)
-        const cr2 = await db.sets.add(val2)
-        const cr3 = await db.maps.add(val3)
+        const cr1 = await db.arrays.add(val1);
+        const cr2 = await db.sets.add(val2);
+        const cr3 = await db.maps.add(val3);

-        assert(cr1.ok)
-        assert(cr2.ok)
-        assert(cr3.ok)
+        assert(cr1.ok);
+        assert(cr2.ok);
+        assert(cr3.ok);

-        const u1 = [1, 3, 5]
-        const uSetEntries = [1, 3, 5]
-        const u2 = new Set(uSetEntries)
-        const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const
-        const u3 = new Map(uMapEntries)
+        const u1 = [1, 3, 5];
+        const uSetEntries = [1, 3, 5];
+        const u2 = new Set(uSetEntries);
+        const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const;
+        const u3 = new Map(uMapEntries);

         const updateCr1 = await db.arrays.update(cr1.id, u1, {
           strategy: "merge",
-        })
+        });

         const updateCr2 = await db.sets.update(cr2.id, u2, {
           strategy: "merge",
-        })
+        });

         const updateCr3 = await db.maps.update(cr3.id, u3, {
           strategy: "merge",
-        })
-
-        assert(updateCr1.ok)
-        assert(updateCr2.ok)
-        assert(updateCr3.ok)
-
-        const doc1 = await db.arrays.find(cr1.id)
-        const doc2 = await db.sets.find(cr2.id)
-        const doc3 = await db.maps.find(cr3.id)
-
-        assert(doc1 !== null)
-        assert(doc2 !== null)
-        assert(doc3 !== null)
-        assertEquals(doc1.value, [...val1, ...u1])
-        assertEquals(doc2.value, new Set([...setEntries, ...uSetEntries]))
-        assertEquals(doc3.value, new Map([...mapEntries, ...uMapEntries]))
-      })
+        });
+
+        assert(updateCr1.ok);
+        assert(updateCr2.ok);
+        assert(updateCr3.ok);
+
+        const doc1 = await db.arrays.find(cr1.id);
+        const doc2 = await db.sets.find(cr2.id);
+        const doc3 = await db.maps.find(cr3.id);
+
+        assert(doc1 !== null);
+        assert(doc2 !== null);
+        assert(doc3 !== null);
+        assertEquals(doc1.value, [...val1, ...u1]);
+        assertEquals(doc2.value, new Set([...setEntries, ...uSetEntries]));
+        assertEquals(doc3.value, new Map([...mapEntries, ...uMapEntries]));
+      });
     },
-  )
+  );

   await t.step(
     "Should update documents of primitive and built-in object types using replace",
@@ -172,65 +172,65 @@ Deno.test("serialized_collection - update", async (t) => {
          numbers: collection(model<number>()),
          strings: collection(model<string>()),
          dates: collection(model<Date>()),
-        })
-
-        const cr1 = await db.numbers.add(10)
-        const cr2 = await db.strings.add("10")
-        const cr3 = await db.dates.add(new Date("2000-01-01"))
-        assert(cr1.ok && cr2.ok && cr3.ok)
-
-        const val1 = 20
-        const val2 = "20"
-        const val3 = new Date("2016-01-01")
-
-        const updateCr1 = await db.numbers.update(cr1.id, val1)
-        const updateCr2 = await db.strings.update(cr2.id, val2)
-        const updateCr3 = await db.dates.update(cr3.id, val3)
-        assert(updateCr1.ok && updateCr2.ok && updateCr3.ok)
-        assert(updateCr1.id === cr1.id)
-        assert(updateCr1.versionstamp !== cr1.versionstamp)
-        assert(updateCr2.id === cr2.id)
-        assert(updateCr2.versionstamp !== cr2.versionstamp)
-        assert(updateCr3.id === cr3.id)
-        assert(updateCr3.versionstamp !== cr3.versionstamp)
-
-        const doc1 = await db.numbers.find(cr1.id)
-        const doc2 = await db.strings.find(cr2.id)
-        const doc3 = await db.dates.find(cr3.id)
-        assert(doc1 !== null && doc2 !== null && doc3 !== null)
-
-        assert(doc1.value === val1)
-        assert(doc2.value === val2)
-        assert(doc3.value.valueOf() === val3.valueOf())
-      })
+        });
+
+        const cr1 = await db.numbers.add(10);
+        const cr2 = await db.strings.add("10");
+        const cr3 = await db.dates.add(new Date("2000-01-01"));
+        assert(cr1.ok && cr2.ok && cr3.ok);
+
+        const val1 = 20;
+        const val2 = "20";
+        const val3 = new Date("2016-01-01");
+
+        const updateCr1 = await db.numbers.update(cr1.id, val1);
+        const updateCr2 = await db.strings.update(cr2.id, val2);
+        const updateCr3 = await db.dates.update(cr3.id, val3);
+        assert(updateCr1.ok && updateCr2.ok && updateCr3.ok);
+        assert(updateCr1.id === cr1.id);
+        assert(updateCr1.versionstamp !== cr1.versionstamp);
+        assert(updateCr2.id === cr2.id);
+        assert(updateCr2.versionstamp !== cr2.versionstamp);
+        assert(updateCr3.id === cr3.id);
+        assert(updateCr3.versionstamp !== cr3.versionstamp);
+
+        const doc1 = await db.numbers.find(cr1.id);
+        const doc2 = await db.strings.find(cr2.id);
+        const doc3 = await db.dates.find(cr3.id);
+        assert(doc1 !== null && doc2 !== null && doc3 !== null);
+
+        assert(doc1.value === val1);
+
assert(doc2.value === val2); + assert(doc3.value.valueOf() === val3.valueOf()); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zs_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zs_users.add(mockUser1); + assert(cr.ok); - await db.zs_users.update(cr.id, mockUser2).catch(() => assertion = false) + await db.zs_users.update(cr.id, mockUser2).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zs_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zs_users.add(mockUser1); + assert(cr.ok); await db.zs_users.update(cr.id, mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_collection/updateMany.test.ts b/tests/serialized_collection/updateMany.test.ts index 243642c..b138084 100644 --- a/tests/serialized_collection/updateMany.test.ts +++ b/tests/serialized_collection/updateMany.test.ts @@ -1,20 +1,20 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateNumbers, generateUsers, useDb, useKv } from "../utils.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateNumbers, generateUsers, useDb, useKv } from "../utils.ts"; Deno.test("serialized_collection - updateMany", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const docs = await db.s_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.s_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -22,40 +22,40 @@ Deno.test("serialized_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.s_users.updateMany(updateData, { strategy: "merge-shallow", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.s_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using deep merge", async () => { await useDb(async (db) 
=> { - const users = generateUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const docs = await db.s_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.s_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -63,61 +63,61 @@ Deno.test("serialized_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.s_users.updateMany(updateData, { strategy: "merge", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.s_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using replace", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.s_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.s_users.addMany(users); + assert(cr.ok); - const docs = await db.s_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.s_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const { result } = await db.s_users.updateMany(mockUser1, { strategy: "replace", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.s_users.forEach((doc) => { - assert(doc.value.address.country === mockUser1.address.country) - assert(doc.value.address.city === mockUser1.address.city) - assert(doc.value.address.houseNr === mockUser1.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) - }) + assert(doc.value.address.country === mockUser1.address.country); + assert(doc.value.address.city === mockUser1.address.city); + assert(doc.value.address.houseNr === mockUser1.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of type Array, Set and Map using merge", @@ -127,71 +127,71 @@ Deno.test("serialized_collection - updateMany", async (t) => { arrays: collection(model()), sets: collection(model>()), maps: collection(model>()), - }) + }); - const val1 = [1, 2, 4] - const setEntries = [1, 2, 4] - const val2 = new Set(setEntries) - const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const - const val3 = new Map(mapEntries) + const val1 = [1, 2, 4]; + const setEntries = [1, 2, 4]; + const val2 = new Set(setEntries); + const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const; + const val3 = new Map(mapEntries); - const vals1: number[][] = [] - const 
vals2: Set[] = [] - const vals3: Map[] = [] + const vals1: number[][] = []; + const vals2: Set[] = []; + const vals3: Map[] = []; for (let i = 0; i < 1_000; i++) { - vals1.push(val1) - vals2.push(val2) - vals3.push(val3) + vals1.push(val1); + vals2.push(val2); + vals3.push(val3); } - const cr1 = await db.arrays.addMany(vals1) - const cr2 = await db.sets.addMany(vals2) - const cr3 = await db.maps.addMany(vals3) + const cr1 = await db.arrays.addMany(vals1); + const cr2 = await db.sets.addMany(vals2); + const cr3 = await db.maps.addMany(vals3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const u1 = [1, 3, 5] - const uSetEntries = [1, 3, 5] - const u2 = new Set(uSetEntries) - const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const - const u3 = new Map(uMapEntries) + const u1 = [1, 3, 5]; + const uSetEntries = [1, 3, 5]; + const u2 = new Set(uSetEntries); + const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const; + const u3 = new Map(uMapEntries); const { result: updateCrs1 } = await db.arrays.updateMany(u1, { strategy: "merge", - }) + }); const { result: updateCrs2 } = await db.sets.updateMany(u2, { strategy: "merge", - }) + }); const { result: updateCrs3 } = await db.maps.updateMany(u3, { strategy: "merge", - }) + }); - assert(updateCrs1.every((cr) => cr.ok)) - assert(updateCrs2.every((cr) => cr.ok)) - assert(updateCrs3.every((cr) => cr.ok)) + assert(updateCrs1.every((cr) => cr.ok)); + assert(updateCrs2.every((cr) => cr.ok)); + assert(updateCrs3.every((cr) => cr.ok)); - const { result: docs1 } = await db.arrays.getMany() - const { result: docs2 } = await db.sets.getMany() - const { result: docs3 } = await db.maps.getMany() + const { result: docs1 } = await db.arrays.getMany(); + const { result: docs2 } = await db.sets.getMany(); + const { result: docs3 } = await db.maps.getMany(); - assert(docs1.length === vals1.length) - assert(docs2.length === vals2.length) - assert(docs3.length === vals3.length) + assert(docs1.length === vals1.length); + assert(docs2.length === vals2.length); + assert(docs3.length === vals3.length); - docs1.forEach((doc) => assertEquals(doc.value, [...val1, ...u1])) + docs1.forEach((doc) => assertEquals(doc.value, [...val1, ...u1])); docs2.forEach((doc) => assertEquals(doc.value, new Set([...setEntries, ...uSetEntries])) - ) + ); docs3.forEach((doc) => assertEquals(doc.value, new Map([...mapEntries, ...uMapEntries])) - ) - }) + ); + }); }, - ) + ); await t.step( "Should update 1000 documents of types primitive and built-in object using replace", @@ -201,100 +201,100 @@ Deno.test("serialized_collection - updateMany", async (t) => { numbers: collection(model()), strings: collection(model()), dates: collection(model()), - }) + }); - const numbers = generateNumbers(1_000) + const numbers = generateNumbers(1_000); - const strings: string[] = [] + const strings: string[] = []; for (let i = 0; i < 1_000; i++) { - strings.push(Math.random().toString()) + strings.push(Math.random().toString()); } - const dates: Date[] = [] + const dates: Date[] = []; for (let i = 0; i < 1_000; i++) { - dates.push(new Date("2000-01-01")) + dates.push(new Date("2000-01-01")); } - const cr1 = await db.numbers.addMany(numbers) - const cr2 = await db.strings.addMany(strings) - const cr3 = await db.dates.addMany(dates) + const cr1 = await db.numbers.addMany(numbers); + const cr2 = await db.strings.addMany(strings); + const cr3 = await db.dates.addMany(dates); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + 
assert(cr2.ok); + assert(cr3.ok); - const docs1 = await db.numbers.getMany() - const docs2 = await db.strings.getMany() - const docs3 = await db.dates.getMany() + const docs1 = await db.numbers.getMany(); + const docs2 = await db.strings.getMany(); + const docs3 = await db.dates.getMany(); - const ids1 = docs1.result.map((doc) => doc.id) - const ids2 = docs2.result.map((doc) => doc.id) - const ids3 = docs3.result.map((doc) => doc.id) + const ids1 = docs1.result.map((doc) => doc.id); + const ids2 = docs2.result.map((doc) => doc.id); + const ids3 = docs3.result.map((doc) => doc.id); - const versionstamps1 = docs1.result.map((doc) => doc.versionstamp) - const versionstamps2 = docs2.result.map((doc) => doc.versionstamp) - const versionstamps3 = docs3.result.map((doc) => doc.versionstamp) + const versionstamps1 = docs1.result.map((doc) => doc.versionstamp); + const versionstamps2 = docs2.result.map((doc) => doc.versionstamp); + const versionstamps3 = docs3.result.map((doc) => doc.versionstamp); - const val1 = 20 - const val2 = "updated" - const val3 = new Date("2016-01-01") + const val1 = 20; + const val2 = "updated"; + const val3 = new Date("2016-01-01"); - const updateQuery1 = await db.numbers.updateMany(val1) - const updateQuery2 = await db.strings.updateMany(val2) - const updateQuery3 = await db.dates.updateMany(val3) + const updateQuery1 = await db.numbers.updateMany(val1); + const updateQuery2 = await db.strings.updateMany(val2); + const updateQuery3 = await db.dates.updateMany(val3); - assert(updateQuery1.result.every((cr) => cr.ok)) - assert(updateQuery2.result.every((cr) => cr.ok)) - assert(updateQuery3.result.every((cr) => cr.ok)) + assert(updateQuery1.result.every((cr) => cr.ok)); + assert(updateQuery2.result.every((cr) => cr.ok)); + assert(updateQuery3.result.every((cr) => cr.ok)); await db.numbers.forEach((doc) => { - assertEquals(doc.value, val1) - assert(ids1.includes(doc.id)) - assert(!versionstamps1.includes(doc.versionstamp)) - }) + assertEquals(doc.value, val1); + assert(ids1.includes(doc.id)); + assert(!versionstamps1.includes(doc.versionstamp)); + }); await db.strings.forEach((doc) => { - assertEquals(doc.value, val2) - assert(ids2.includes(doc.id)) - assert(!versionstamps2.includes(doc.versionstamp)) - }) + assertEquals(doc.value, val2); + assert(ids2.includes(doc.id)); + assert(!versionstamps2.includes(doc.versionstamp)); + }); await db.dates.forEach((doc) => { - assertEquals(doc.value, val3) - assert(ids3.includes(doc.id)) - assert(!versionstamps3.includes(doc.versionstamp)) - }) - }) + assertEquals(doc.value, val3); + assert(ids3.includes(doc.id)); + assert(!versionstamps3.includes(doc.versionstamp)); + }); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zs_users.addMany(users) - assert(cr.ok) + const cr = await db.zs_users.addMany(users); + assert(cr.ok); - await db.zs_users.updateMany(mockUser1).catch(() => assertion = false) + await db.zs_users.updateMany(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zs_users.addMany(users) - assert(cr.ok) + const cr = await 
db.zs_users.addMany(users); + assert(cr.ok); await db.zs_users.updateMany(mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_collection/updateOne.test.ts b/tests/serialized_collection/updateOne.test.ts index 40ceb93..35c35e5 100644 --- a/tests/serialized_collection/updateOne.test.ts +++ b/tests/serialized_collection/updateOne.test.ts @@ -1,25 +1,25 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals, assertNotEquals } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals, assertNotEquals } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; import { generateNumbers, generateUsers, sleep, useDb, useKv, -} from "../utils.ts" +} from "../utils.ts"; Deno.test("serialized_collection - updateOne", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -27,43 +27,43 @@ Deno.test("serialized_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.s_users.updateOne(updateData, { strategy: "merge-shallow", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.s_users.find(cr1.id) - const doc2 = await db.s_users.find(cr2.id) + const doc1 = await db.s_users.find(cr1.id); + const doc2 = await db.s_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -71,74 +71,74 @@ 
Deno.test("serialized_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.s_users.updateOne(updateData, { strategy: "merge", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.s_users.find(cr1.id) - const doc2 = await db.s_users.find(cr2.id) + const doc1 = await db.s_users.find(cr1.id); + const doc2 = await db.s_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.s_users.add(mockUser1) - await sleep(10) - const cr2 = await db.s_users.add(mockUser2) + const cr1 = await db.s_users.add(mockUser1); + await sleep(10); + const cr2 = await db.s_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.s_users.updateOne(updateData, { strategy: "replace", - }) - - assert(updateCr.ok) - - const doc1 = await db.s_users.find(cr1.id) - const doc2 = await db.s_users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + }); + + assert(updateCr.ok); + + const doc1 = await db.s_users.find(cr1.id); + const doc2 = await db.s_users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + 
assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of type Array, Set and Map using merge", @@ -148,71 +148,71 @@ Deno.test("serialized_collection - updateOne", async (t) => { arrays: collection(model()), sets: collection(model>()), maps: collection(model>()), - }) + }); - const val1 = [1, 2, 4] - const setEntries = [1, 2, 4] - const val2 = new Set(setEntries) - const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const - const val3 = new Map(mapEntries) + const val1 = [1, 2, 4]; + const setEntries = [1, 2, 4]; + const val2 = new Set(setEntries); + const mapEntries = [["1", 1], ["2", 2], ["4", 4]] as const; + const val3 = new Map(mapEntries); - const vals1: number[][] = [] - const vals2: Set[] = [] - const vals3: Map[] = [] + const vals1: number[][] = []; + const vals2: Set[] = []; + const vals3: Map[] = []; for (let i = 0; i < 1_000; i++) { - vals1.push(val1) - vals2.push(val2) - vals3.push(val3) + vals1.push(val1); + vals2.push(val2); + vals3.push(val3); } - const cr1 = await db.arrays.addMany(vals1) - const cr2 = await db.sets.addMany(vals2) - const cr3 = await db.maps.addMany(vals3) + const cr1 = await db.arrays.addMany(vals1); + const cr2 = await db.sets.addMany(vals2); + const cr3 = await db.maps.addMany(vals3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const u1 = [1, 3, 5] - const uSetEntries = [1, 3, 5] - const u2 = new Set(uSetEntries) - const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const - const u3 = new Map(uMapEntries) + const u1 = [1, 3, 5]; + const uSetEntries = [1, 3, 5]; + const u2 = new Set(uSetEntries); + const uMapEntries = [["1", 1], ["3", 3], ["5", 5]] as const; + const u3 = new Map(uMapEntries); const updateCr1 = await db.arrays.updateOne(u1, { strategy: "merge", - }) + }); const updateCr2 = await db.sets.updateOne(u2, { strategy: "merge", - }) + }); const updateCr3 = await db.maps.updateOne(u3, { strategy: "merge", - }) + }); - assert(updateCr1.ok) - assert(updateCr2.ok) - assert(updateCr3.ok) + assert(updateCr1.ok); + assert(updateCr2.ok); + assert(updateCr3.ok); - const { result: [d1, ...docs1] } = await db.arrays.getMany() - const { result: [d2, ...docs2] } = await db.sets.getMany() - const { result: [d3, ...docs3] } = await db.maps.getMany() + const { result: [d1, ...docs1] } = await db.arrays.getMany(); + const { result: [d2, ...docs2] } = await db.sets.getMany(); + const { result: [d3, ...docs3] } = await db.maps.getMany(); - assertEquals(d1.value, [...val1, ...u1]) - assertEquals(d2.value, new Set([...setEntries, ...uSetEntries])) - assertEquals(d3.value, new Map([...mapEntries, ...uMapEntries])) + assertEquals(d1.value, [...val1, ...u1]); + assertEquals(d2.value, new Set([...setEntries, ...uSetEntries])); + assertEquals(d3.value, new Map([...mapEntries, ...uMapEntries])); - docs1.forEach((doc) => assertNotEquals(doc.value, [...val1, ...u1])) + docs1.forEach((doc) => assertNotEquals(doc.value, [...val1, ...u1])); docs2.forEach((doc) => assertNotEquals(doc.value, new Set([...setEntries, ...uSetEntries])) - ) + ); docs3.forEach((doc) => assertNotEquals(doc.value, new Map([...mapEntries, ...uMapEntries])) - ) - }) + ); + }); }, - ) + ); await t.step( "Should update only one 
document of types primitive and built-in object using replace", @@ -222,81 +222,83 @@ Deno.test("serialized_collection - updateOne", async (t) => { numbers: collection(model()), strings: collection(model()), dates: collection(model()), - }) + }); - const numbers = generateNumbers(1_000) + const numbers = generateNumbers(1_000); - const strings: string[] = [] + const strings: string[] = []; for (let i = 0; i < 1_000; i++) { - strings.push(Math.random().toString()) + strings.push(Math.random().toString()); } - const dates: Date[] = [] + const dates: Date[] = []; for (let i = 0; i < 1_000; i++) { - dates.push(new Date("2000-01-01")) + dates.push(new Date("2000-01-01")); } - const cr1 = await db.numbers.addMany(numbers) - const cr2 = await db.strings.addMany(strings) - const cr3 = await db.dates.addMany(dates) + const cr1 = await db.numbers.addMany(numbers); + const cr2 = await db.strings.addMany(strings); + const cr3 = await db.dates.addMany(dates); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); - const val1 = -100 - const val2 = "updated" - const val3 = new Date("2016-01-01") + const val1 = -100; + const val2 = "updated"; + const val3 = new Date("2016-01-01"); - const updateCr1 = await db.numbers.updateOne(val1) - const updateCr2 = await db.strings.updateOne(val2) - const updateCr3 = await db.dates.updateOne(val3) + const updateCr1 = await db.numbers.updateOne(val1); + const updateCr2 = await db.strings.updateOne(val2); + const updateCr3 = await db.dates.updateOne(val3); - assert(updateCr1.ok) - assert(updateCr2.ok) - assert(updateCr3.ok) + assert(updateCr1.ok); + assert(updateCr2.ok); + assert(updateCr3.ok); - const { result: [d1, ...ds1] } = await db.numbers.getMany() - const { result: [d2, ...ds2] } = await db.strings.getMany() - const { result: [d3, ...ds3] } = await db.dates.getMany() + const { result: [d1, ...ds1] } = await db.numbers.getMany(); + const { result: [d2, ...ds2] } = await db.strings.getMany(); + const { result: [d3, ...ds3] } = await db.dates.getMany(); - assertEquals(d1.value, val1) - ds1.forEach((doc) => assertNotEquals(doc.value, val1)) + assertEquals(d1.value, val1); + ds1.forEach((doc) => assertNotEquals(doc.value, val1)); - assertEquals(d2.value, val2) - ds2.forEach((doc) => assertNotEquals(doc.value, val2)) + assertEquals(d2.value, val2); + ds2.forEach((doc) => assertNotEquals(doc.value, val2)); - assertEquals(d3.value, val3) - ds3.forEach((doc) => assertNotEquals(doc.value, val3)) - }) + assertEquals(d3.value, val3); + ds3.forEach((doc) => assertNotEquals(doc.value, val3)); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zs_users.addMany(users) - assert(cr.ok) + const cr = await db.zs_users.addMany(users); + assert(cr.ok); - await db.zs_users.updateOne(mockUser1).catch(() => assertion = false) + await db.zs_users.updateOne(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zs_users.addMany(users) - assert(cr.ok) + const cr = await db.zs_users.addMany(users); + assert(cr.ok); - await 
db.zs_users.updateOne(mockUserInvalid).catch(() => assertion = true) + await db.zs_users.updateOne(mockUserInvalid).catch(() => + assertion = true + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_collection/upsert.test.ts b/tests/serialized_collection/upsert.test.ts index 46c9553..a5b5259 100644 --- a/tests/serialized_collection/upsert.test.ts +++ b/tests/serialized_collection/upsert.test.ts @@ -1,35 +1,35 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_collection - upsert", async (t) => { await t.step("Should set new doucment entry by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; const cr = await db.s_users.upsert({ id: id, set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.s_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.s_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); + }); await t.step( "Should update existing document entry by id using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.s_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.s_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -37,7 +37,7 @@ Deno.test("serialized_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.s_users.upsert({ id: id, @@ -45,30 +45,30 @@ Deno.test("serialized_collection - upsert", async (t) => { update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.s_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.s_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by id using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.s_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.s_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -76,7 +76,7 @@ Deno.test("serialized_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.s_users.upsert({ id: id, @@ -84,30 +84,30 @@ Deno.test("serialized_collection - upsert", async (t) => { update: updateData, }, { strategy: 
"merge", - }) - - assert(cr2.ok) - - const doc = await db.s_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.s_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by id using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.s_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.s_users.set(id, mockUser1); + assert(cr1.ok); const cr2 = await db.s_users.upsert({ id: id, @@ -115,19 +115,19 @@ Deno.test("serialized_collection - upsert", async (t) => { update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.s_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.s_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_collection/watch.test.ts b/tests/serialized_collection/watch.test.ts index bea84f6..98ab772 100644 --- a/tests/serialized_collection/watch.test.ts +++ b/tests/serialized_collection/watch.test.ts @@ -1,70 +1,70 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_collection - watch", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id = "id" - const docs: (Document | null)[] = [] + const id = "id"; + const docs: (Document | null)[] = []; const { promise, cancel } = db.s_users.watch(id, (doc) => { - docs.push(doc) - }) + docs.push(doc); + }); - await db.s_users.set(id, mockUser1) - await sleep(500) - await db.s_users.set(id, mockUser2, { overwrite: true }) - await sleep(500) - await 
db.s_users.update(id, mockUser3) - await sleep(500) - await db.s_users.delete(id) - await sleep(500) + await db.s_users.set(id, mockUser1); + await sleep(500); + await db.s_users.set(id, mockUser2, { overwrite: true }); + await sleep(500); + await db.s_users.update(id, mockUser3); + await sleep(500); + await db.s_users.delete(id); + await sleep(500); - assert(docs.some((doc) => doc?.value.username === mockUser1.username)) - assert(docs.some((doc) => doc?.value.username === mockUser2.username)) - assert(docs.some((doc) => doc?.value.username === mockUser3.username)) - assert(docs.some((doc) => doc === null)) + assert(docs.some((doc) => doc?.value.username === mockUser1.username)); + assert(docs.some((doc) => doc?.value.username === mockUser2.username)); + assert(docs.some((doc) => doc?.value.username === mockUser3.username)); + assert(docs.some((doc) => doc === null)); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - let count = 0 - let username = "" - let lastDoc: any + const id1 = "id1"; + const id2 = "id2"; + let count = 0; + let username = ""; + let lastDoc: any; const { promise, cancel } = db.s_users.watch(id1, (doc) => { - count++ - lastDoc = doc + count++; + lastDoc = doc; if (doc?.value.username) { - username = doc.value.username + username = doc.value.username; } - }) + }); - await db.s_users.set(id2, mockUser1) - await sleep(500) - await db.s_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - await db.s_users.update(id2, mockUser3) - await sleep(500) - await db.s_users.delete(id2) - await sleep(500) + await db.s_users.set(id2, mockUser1); + await sleep(500); + await db.s_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + await db.s_users.update(id2, mockUser3); + await sleep(500); + await db.s_users.delete(id2); + await sleep(500); // Account for initial invocation - assert(count === 1) - assert(username === "") - assert(lastDoc === null) + assert(count === 1); + assert(username === ""); + assert(lastDoc === null); - await cancel() - await promise - }) - }) -}) + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/serialized_collection/watchMany.test.ts b/tests/serialized_collection/watchMany.test.ts index 1abea51..d5c173d 100644 --- a/tests/serialized_collection/watchMany.test.ts +++ b/tests/serialized_collection/watchMany.test.ts @@ -1,8 +1,8 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test({ name: "serialized_collection - watchMany", @@ -11,112 +11,112 @@ Deno.test({ fn: async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - const id3 = "id3" - const snapshots: (Document | null)[][] = [] + const id1 = "id1"; + const id2 = "id2"; + const id3 = "id3"; + const snapshots: (Document | null)[][] = []; - await db.s_users.set(id3, mockUser1) + await db.s_users.set(id3, mockUser1); const { promise, cancel } = 
db.s_users.watchMany( [id1, id2, id3], (docs) => { - snapshots.push(docs) + snapshots.push(docs); }, - ) + ); - const cr1 = await db.s_users.set(id1, mockUser1) - await sleep(500) - await db.s_users.delete(id1) - await sleep(500) - const cr2 = await db.s_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - const cr3 = await db.s_users.update(id3, mockUser3) - await sleep(500) + const cr1 = await db.s_users.set(id1, mockUser1); + await sleep(500); + await db.s_users.delete(id1); + await sleep(500); + const cr2 = await db.s_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + const cr3 = await db.s_users.update(id3, mockUser3); + await sleep(500); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1?.value.username === mockUser1.username && doc2 === null && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2 === null && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser1.username - })) + doc3?.value.username === mockUser1.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? 
null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser3.username - })) + doc3?.value.username === mockUser3.username; + })); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id1" - const id3 = "id1" - const id4 = "id4" - let count = 0 - let lastDocs: any[] = [] + const id1 = "id1"; + const id2 = "id1"; + const id3 = "id1"; + const id4 = "id4"; + let count = 0; + let lastDocs: any[] = []; const { promise, cancel } = db.s_users.watchMany( [id1, id2, id3], (docs) => { - count++ - lastDocs = docs + count++; + lastDocs = docs; }, - ) - - await db.s_users.set(id4, mockUser1) - await sleep(500) - await db.s_users.set(id4, mockUser2, { overwrite: true }) - await sleep(500) - await db.s_users.update(id4, mockUser3) - await sleep(500) - await db.s_users.delete(id4) - await sleep(500) - - assert(count === 1) - assert(lastDocs[0] === null) - assert(lastDocs[1] === null) - assert(lastDocs[2] === null) - - await cancel() - await promise - }) - }) + ); + + await db.s_users.set(id4, mockUser1); + await sleep(500); + await db.s_users.set(id4, mockUser2, { overwrite: true }); + await sleep(500); + await db.s_users.update(id4, mockUser3); + await sleep(500); + await db.s_users.delete(id4); + await sleep(500); + + assert(count === 1); + assert(lastDocs[0] === null); + assert(lastDocs[1] === null); + assert(lastDocs[2] === null); + + await cancel(); + await promise; + }); + }); }, -}) +}); diff --git a/tests/serialized_indexable_collection/add.test.ts b/tests/serialized_indexable_collection/add.test.ts index 963f921..bc08b12 100644 --- a/tests/serialized_indexable_collection/add.test.ts +++ b/tests/serialized_indexable_collection/add.test.ts @@ -1,57 +1,57 @@ -import { assert } from "../test.deps.ts" -import { mockUserInvalid } from "../mocks.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUserInvalid } from "../mocks.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - add", async (t) => { await t.step("Should add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(user) - assert(cr.ok) + const cr = await db.is_users.add(user); + assert(cr.ok); - const doc = await db.is_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === user.username) - }) - }) + const doc = await db.is_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === user.username); + }); + }); await t.step( "Should not add new document with colliding primary index", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(user) - const cr2 = await db.is_users.add(user) - const count = await db.is_users.count() - assert(cr1.ok) - assert(!cr2.ok) - assert(count === 1) - }) + const cr1 = await db.is_users.add(user); + const cr2 = await db.is_users.add(user); + const count = await db.is_users.count(); + assert(cr1.ok); + assert(!cr2.ok); + assert(count === 1); + }); }, - ) + ); await t.step( "Should successfully parse and add new document entry to collection", async () => { await useDb(async (db) => { - const cr = await db.zis_users.add(user) - assert(cr.ok) + const cr = await db.zis_users.add(user); + 
assert(cr.ok); - const doc = await db.zis_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === user.username) - }) + const doc = await db.zis_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === user.username); + }); }, - ) + ); await t.step( "Should fail parse and add new document entry to collection", async () => { await useDb(async (db) => { - let assertion = false - await db.zis_users.add(mockUserInvalid).catch(() => assertion = true) - assert(assertion) - }) + let assertion = false; + await db.zis_users.add(mockUserInvalid).catch(() => assertion = true); + assert(assertion); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/addMany.test.ts b/tests/serialized_indexable_collection/addMany.test.ts index 37a11c9..ee59972 100644 --- a/tests/serialized_indexable_collection/addMany.test.ts +++ b/tests/serialized_indexable_collection/addMany.test.ts @@ -1,73 +1,73 @@ -import { assert } from "../test.deps.ts" -import { generateInvalidUsers, generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateInvalidUsers, generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - addMany", async (t) => { await t.step( "Should successfully add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const { result } = await db.is_users.getMany() + const { result } = await db.is_users.getMany(); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should not add documents with colliding primary indices", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany([user, user]) - const count = await db.is_users.count() + const cr = await db.is_users.addMany([user, user]); + const count = await db.is_users.count(); - assert(!cr.ok) - assert(count === 1) - }) + assert(!cr.ok); + assert(count === 1); + }); }, - ) + ); await t.step( "Should successfully parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.zis_users.addMany(users); + assert(cr.ok); - const { result } = await db.zis_users.getMany() + const { result } = await db.zis_users.getMany(); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) + ); await t.step( "Should fail to parse and add 1000 documents to the collection", async () => { await useDb(async (db) => { - const users = generateInvalidUsers(1_000) - let assertion = false + const users = generateInvalidUsers(1_000); + let assertion = false; - await db.zis_users.addMany(users).catch(() => assertion = true) + await db.zis_users.addMany(users).catch(() => assertion = true); - assert(assertion) - }) + assert(assertion); + }); }, - ) -}) + ); +}); diff --git 
a/tests/serialized_indexable_collection/count.test.ts b/tests/serialized_indexable_collection/count.test.ts index b2286ea..cc37f31 100644 --- a/tests/serialized_indexable_collection/count.test.ts +++ b/tests/serialized_indexable_collection/count.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - count", async (t) => { await t.step( "Should correctly count total number of documents in the collection", async () => { await useDb(async (db) => { - const count1 = await db.is_users.count() - assert(count1 === 0) + const count1 = await db.is_users.count(); + assert(count1 === 0); - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const count2 = await db.is_users.count() - assert(count2 === users.length) - }) + const count2 = await db.is_users.count(); + assert(count2 === users.length); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/countBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/countBySecondaryIndex.test.ts index e923049..ed967f0 100644 --- a/tests/serialized_indexable_collection/countBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/countBySecondaryIndex.test.ts @@ -1,6 +1,6 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - countBySecondaryIndex", async (t) => { await t.step( @@ -10,19 +10,19 @@ Deno.test("serialized_indexable_collection - countBySecondaryIndex", async (t) = const count1 = await db.is_users.countBySecondaryIndex( "age", mockUser1.age, - ) - assert(count1 === 0) + ); + assert(count1 === 0); - const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); const count2 = await db.is_users.countBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(count2 === 2) - }) + assert(count2 === 2); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/countBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/countBySecondaryOrder.test.ts index 9676c87..faa5cd8 100644 --- a/tests/serialized_indexable_collection/countBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/countBySecondaryOrder.test.ts @@ -1,6 +1,6 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUsersWithAlteredAge } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUsersWithAlteredAge } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - countBySecondaryOrder", async (t) => { await t.step( @@ -9,11 +9,11 @@ Deno.test("serialized_indexable_collection - countBySecondaryOrder", async (t) = await useDb(async (db) => { const count1 = await db.is_users.countBySecondaryOrder( "age", - ) - assert(count1 === 0) + ); + assert(count1 === 0); - const cr = await 
db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const count2 = await db.is_users.countBySecondaryOrder( "age", @@ -21,9 +21,9 @@ Deno.test("serialized_indexable_collection - countBySecondaryOrder", async (t) = limit: 1, filter: (doc) => doc.value.age < mockUser1.age, }, - ) + ); - assert(count2 === 1) + assert(count2 === 1); const count3 = await db.is_users.countBySecondaryOrder( "age", @@ -31,13 +31,13 @@ Deno.test("serialized_indexable_collection - countBySecondaryOrder", async (t) = limit: 2, filter: (doc) => doc.value.age < mockUser2.age, }, - ) + ); - assert(count3 === 2) + assert(count3 === 2); - const count4 = await db.is_users.countBySecondaryOrder("age") - assert(count4 === 3) - }) + const count4 = await db.is_users.countBySecondaryOrder("age"); + assert(count4 === 3); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/delete.test.ts b/tests/serialized_indexable_collection/delete.test.ts index f670f37..158d8e4 100644 --- a/tests/serialized_indexable_collection/delete.test.ts +++ b/tests/serialized_indexable_collection/delete.test.ts @@ -1,68 +1,68 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - delete", async (t) => { await t.step( "Should successfully delete a document and its indices from the collection", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(user) - const count1 = await db.is_users.count() + const cr = await db.is_users.add(user); + const count1 = await db.is_users.count(); const byPrimary1 = await db.is_users.findByPrimaryIndex( "username", user.username, - ) + ); const bySecondary1 = await db.is_users.findBySecondaryIndex( "age", user.age, - ) + ); - assert(cr.ok) - assert(count1 === 1) - assert(byPrimary1?.id === cr.id) - assert(bySecondary1.result.at(0)?.id === cr.id) + assert(cr.ok); + assert(count1 === 1); + assert(byPrimary1?.id === cr.id); + assert(bySecondary1.result.at(0)?.id === cr.id); - await db.is_users.delete(cr.id) + await db.is_users.delete(cr.id); - const count2 = await db.is_users.count() - const doc = await db.is_users.find(cr.id) + const count2 = await db.is_users.count(); + const doc = await db.is_users.find(cr.id); const byPrimary2 = await db.is_users.findByPrimaryIndex( "username", user.username, - ) + ); const bySecondary2 = await db.is_users.findBySecondaryIndex( "age", user.age, - ) + ); - assert(count2 === 0) - assert(doc === null) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(doc === null); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) + ); await t.step( "Should successfully delete 1000 documents from the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - const count1 = await db.is_users.count() + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); - assert(cr.ok) - assert(count1 === users.length) + assert(cr.ok); + assert(count1 === users.length); - const { result: ids } = await db.is_users.map((doc) => doc.id) + const { result: ids } = await 
db.is_users.map((doc) => doc.id); - await db.is_users.delete(...ids) + await db.is_users.delete(...ids); - const count2 = await db.is_users.count() - assert(count2 === 0) - }) + const count2 = await db.is_users.count(); + assert(count2 === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/deleteByPrimaryIndex.test.ts b/tests/serialized_indexable_collection/deleteByPrimaryIndex.test.ts index 616c149..5eecf03 100644 --- a/tests/serialized_indexable_collection/deleteByPrimaryIndex.test.ts +++ b/tests/serialized_indexable_collection/deleteByPrimaryIndex.test.ts @@ -1,47 +1,47 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - deleteByPrimaryIndex", async (t) => { await t.step( "Should successfully delete a document and its indices from the collection by primary index", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(user) - const count1 = await db.is_users.count() + const cr = await db.is_users.add(user); + const count1 = await db.is_users.count(); const byPrimary1 = await db.is_users.findByPrimaryIndex( "username", user.username, - ) + ); const bySecondary1 = await db.is_users.findBySecondaryIndex( "age", user.age, - ) + ); - assert(cr.ok) - assert(count1 === 1) - assert(byPrimary1?.id === cr.id) - assert(bySecondary1.result.at(0)?.id === cr.id) + assert(cr.ok); + assert(count1 === 1); + assert(byPrimary1?.id === cr.id); + assert(bySecondary1.result.at(0)?.id === cr.id); - await db.is_users.deleteByPrimaryIndex("username", user.username) + await db.is_users.deleteByPrimaryIndex("username", user.username); - const count2 = await db.is_users.count() - const doc = await db.is_users.find(cr.id) + const count2 = await db.is_users.count(); + const doc = await db.is_users.find(cr.id); const byPrimary2 = await db.is_users.findByPrimaryIndex( "username", user.username, - ) + ); const bySecondary2 = await db.is_users.findBySecondaryIndex( "age", user.age, - ) + ); - assert(count2 === 0) - assert(doc === null) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(doc === null); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/deleteBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/deleteBySecondaryIndex.test.ts index 9457226..690f4c4 100644 --- a/tests/serialized_indexable_collection/deleteBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/deleteBySecondaryIndex.test.ts @@ -1,49 +1,49 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user1, user2] = generateLargeUsers(2) +const [user1, user2] = generateLargeUsers(2); Deno.test("serialized_indexable_collection - deleteBySecondaryIndex", async (t) => { await t.step( "Should delete documents and indices from the collection by secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(user1) - const cr2 = await db.is_users.add(user2) - assert(cr1.ok && cr2.ok) - const count1 = await db.is_users.count() + const cr1 = await 
db.is_users.add(user1); + const cr2 = await db.is_users.add(user2); + assert(cr1.ok && cr2.ok); + const count1 = await db.is_users.count(); const byPrimary1 = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary1 = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(count1 === 2) - assert(byPrimary1?.value.username === user1.username) - assert(bySecondary1.result.length === 2) + assert(count1 === 2); + assert(byPrimary1?.value.username === user1.username); + assert(bySecondary1.result.length === 2); - await db.is_users.deleteBySecondaryIndex("age", user1.age) + await db.is_users.deleteBySecondaryIndex("age", user1.age); - const count2 = await db.is_users.count() + const count2 = await db.is_users.count(); const byPrimary2 = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary2 = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(count2 === 0) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/deleteMany.test.ts b/tests/serialized_indexable_collection/deleteMany.test.ts index 81ee5fd..ba88203 100644 --- a/tests/serialized_indexable_collection/deleteMany.test.ts +++ b/tests/serialized_indexable_collection/deleteMany.test.ts @@ -1,46 +1,46 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - deleteMany", async (t) => { await t.step( "Should delete all documents and indices from the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const user1 = users[0] + const users = generateLargeUsers(1_000); + const user1 = users[0]; - const cr = await db.is_users.addMany(users) - const count1 = await db.is_users.count() + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); const byPrimary1 = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary1 = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(cr.ok) - assert(count1 === users.length) - assert(byPrimary1?.value.username === user1.username) - assert(bySecondary1.result.length > 0) + assert(cr.ok); + assert(count1 === users.length); + assert(byPrimary1?.value.username === user1.username); + assert(bySecondary1.result.length > 0); - await db.is_users.deleteMany() + await db.is_users.deleteMany(); - const count2 = await db.is_users.count() + const count2 = await db.is_users.count(); const byPrimary2 = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary2 = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(count2 === 0) - assert(byPrimary2 === null) - assert(bySecondary2.result.length === 0) - }) + assert(count2 === 0); + assert(byPrimary2 === null); + assert(bySecondary2.result.length === 0); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/deleteManyBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/deleteManyBySecondaryOrder.test.ts index 47d2e83..af4b171 100644 --- a/tests/serialized_indexable_collection/deleteManyBySecondaryOrder.test.ts +++ 
b/tests/serialized_indexable_collection/deleteManyBySecondaryOrder.test.ts @@ -1,28 +1,28 @@ -import { assert, assertEquals } from "../test.deps.ts" -import { mockUser2, mockUsersWithAlteredAge } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert, assertEquals } from "../test.deps.ts"; +import { mockUser2, mockUsersWithAlteredAge } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - deleteManyBySecondaryOrder", async (t) => { await t.step( "Should delete documents and indices from the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - const count1 = await db.is_users.count() - assert(cr.ok) - assertEquals(count1, mockUsersWithAlteredAge.length) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + const count1 = await db.is_users.count(); + assert(cr.ok); + assertEquals(count1, mockUsersWithAlteredAge.length); await db.is_users.deleteManyBySecondaryOrder("age", { limit: mockUsersWithAlteredAge.length - 1, - }) + }); - const count2 = await db.is_users.count() - const doc = await db.is_users.getOne() + const count2 = await db.is_users.count(); + const doc = await db.is_users.getOne(); - assertEquals(count2, 1) - assertEquals(doc?.value.username, mockUser2.username) - assertEquals(doc?.value.address, mockUser2.address) - }) + assertEquals(count2, 1); + assertEquals(doc?.value.username, mockUser2.username); + assertEquals(doc?.value.address, mockUser2.address); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/enqueue.test.ts b/tests/serialized_indexable_collection/enqueue.test.ts index 1be8e01..b6521e3 100644 --- a/tests/serialized_indexable_collection/enqueue.test.ts +++ b/tests/serialized_indexable_collection/enqueue.test.ts @@ -4,101 +4,101 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" -import { createHandlerId } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { createResolver, useDb, useKv } from "../utils.ts" +} from "../../mod.ts"; +import { createHandlerId } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { createResolver, useDb, useKv } from "../utils.ts"; Deno.test("serialized_indexable_collection - enqueue", async (t) => { await t.step("Should enqueue message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); const db = kvdex(kv, { is_users: collection(model(), { indices: {}, serialized: true }), - }) + }); - const handlerId = createHandlerId(db.is_users._keys.base, undefined) + const handlerId = createHandlerId(db.is_users._keys.base, undefined); - let assertion = false + let assertion = false; const listener = kv.listenQueue((msg) => { - const qMsg = msg as QueueMessage - assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data - sleeper.resolve() - }) + const qMsg = msg as QueueMessage; + assertion = qMsg.__handlerId__ === handlerId && qMsg.__data__ === data; + sleeper.resolve(); + }); await db.is_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.is_users.findUndelivered(undeliveredId) - 
assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.is_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should enqueue message in correct topic", async () => { await useDb(async (db) => { - const data = "data" - const undeliveredId = "undelivered" - const topic = "topic" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "undelivered"; + const topic = "topic"; + const sleeper = createResolver(); - let assertion1 = false - let assertion2 = true + let assertion1 = false; + let assertion2 = true; const l1 = db.is_users.listenQueue(() => { - assertion1 = true - sleeper.resolve() - }, { topic }) + assertion1 = true; + sleeper.resolve(); + }, { topic }); - const l2 = db.is_users.listenQueue(() => assertion2 = false) + const l2 = db.is_users.listenQueue(() => assertion2 = false); await db.is_users.enqueue("data", { idsIfUndelivered: [undeliveredId], topic, - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.i_users.findUndelivered(undeliveredId) - assert(assertion1 || typeof undelivered?.value === typeof data) - assert(assertion2) + const undelivered = await db.i_users.findUndelivered(undeliveredId); + assert(assertion1 || typeof undelivered?.value === typeof data); + assert(assertion2); - return async () => await Promise.all([l1, l2]) - }) - }) + return async () => await Promise.all([l1, l2]); + }); + }); await t.step("Should enqueue message with undefined data", async () => { await useDb(async (db) => { - const data = undefined - const undeliveredId = "undelivered" - const sleeper = createResolver() + const data = undefined; + const undeliveredId = "undelivered"; + const sleeper = createResolver(); - let assertion = false + let assertion = false; const listener = db.is_users.listenQueue((msg) => { - assertion = msg === data - sleeper.resolve() - }) + assertion = msg === data; + sleeper.resolve(); + }); await db.is_users.enqueue(data, { idsIfUndelivered: [undeliveredId], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.is_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.is_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/serialized_indexable_collection/find.test.ts b/tests/serialized_indexable_collection/find.test.ts index a8bbd32..9fff355 100644 --- a/tests/serialized_indexable_collection/find.test.ts +++ b/tests/serialized_indexable_collection/find.test.ts @@ -1,26 +1,26 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - find", async (t) => { await t.step("Should find document by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.is_users.set(id, user) - assert(cr.ok) + const cr = await db.is_users.set(id, user); + assert(cr.ok); - const doc = await db.is_users.find(id) - assert(doc !== null) - 
assert(doc.value.username === user.username) - }) - }) + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === user.username); + }); + }); await t.step("Should not find document by non-existing id", async () => { await useDb(async (db) => { - const doc = await db.is_users.find("123") - assert(doc === null) - }) - }) -}) + const doc = await db.is_users.find("123"); + assert(doc === null); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/findByPrimaryIndex.test.ts b/tests/serialized_indexable_collection/findByPrimaryIndex.test.ts index cadd418..032b0fa 100644 --- a/tests/serialized_indexable_collection/findByPrimaryIndex.test.ts +++ b/tests/serialized_indexable_collection/findByPrimaryIndex.test.ts @@ -1,52 +1,52 @@ -import { assert } from "../test.deps.ts" -import { TransformUserModel } from "../models.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { TransformUserModel } from "../models.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user] = generateLargeUsers(1) +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - findByPrimaryIndex", async (t) => { await t.step("Should find document by primary index", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr = await db.is_users.set(id, user) - assert(cr.ok) + const cr = await db.is_users.set(id, user); + assert(cr.ok); const doc = await db.is_users.findByPrimaryIndex( "username", user.username, - ) + ); - assert(doc !== null) - assert(doc.value.username === user.username) - }) - }) + assert(doc !== null); + assert(doc.value.username === user.username); + }); + }); await t.step("Should not find document by non-existing index", async () => { await useDb(async (db) => { const doc = await db.is_users.findByPrimaryIndex( "username", user.username, - ) - assert(doc === null) - }) - }) + ); + assert(doc === null); + }); + }); await t.step( "Should find document by asymmetric model primary index", async () => { await useDb(async (db) => { - const transformed = TransformUserModel.parse(user) + const transformed = TransformUserModel.parse(user); - const cr = await db.ais_users.add(user) - assert(cr.ok) + const cr = await db.ais_users.add(user); + assert(cr.ok); const doc = await db.ais_users.findByPrimaryIndex( "name", transformed.name, - ) - assert(doc?.value.name === transformed.name) - }) + ); + assert(doc?.value.name === transformed.name); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/findBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/findBySecondaryIndex.test.ts index 8038b69..ccf8469 100644 --- a/tests/serialized_indexable_collection/findBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/findBySecondaryIndex.test.ts @@ -1,61 +1,61 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" -import { TransformUserModel } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; +import { TransformUserModel } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - findBySecondaryIndex", async (t) => { await t.step("Should find documents by secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser2) - 
assert(cr1.ok && cr2.ok) + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser2); + assert(cr1.ok && cr2.ok); const bySecondary = await db.i_users.findBySecondaryIndex( "age", mockUser1.age, - ) + ); - assert(bySecondary.result.length === 2) - assert(bySecondary.result.some((doc) => doc.id === cr1.id)) - assert(bySecondary.result.some((doc) => doc.id === cr2.id)) - }) - }) + assert(bySecondary.result.length === 2); + assert(bySecondary.result.some((doc) => doc.id === cr1.id)); + assert(bySecondary.result.some((doc) => doc.id === cr2.id)); + }); + }); await t.step( "Should not find documents by non-existing secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.i_users.add(mockUser1) - const cr2 = await db.i_users.add(mockUser2) - assert(cr1.ok && cr2.ok) + const cr1 = await db.i_users.add(mockUser1); + const cr2 = await db.i_users.add(mockUser2); + assert(cr1.ok && cr2.ok); const bySecondary = await db.i_users.findBySecondaryIndex( "age", -1, - ) + ); - assert(bySecondary.result.length === 0) - }) + assert(bySecondary.result.length === 0); + }); }, - ) + ); await t.step( "Should find documents by asymmetric model secondary index", async () => { await useDb(async (db) => { - const t1 = TransformUserModel.parse(mockUser1) - const t2 = TransformUserModel.parse(mockUser2) + const t1 = TransformUserModel.parse(mockUser1); + const t2 = TransformUserModel.parse(mockUser2); - const cr = await db.ai_users.addMany([mockUser1, mockUser2]) - assert(cr.ok) + const cr = await db.ai_users.addMany([mockUser1, mockUser2]); + assert(cr.ok); const { result } = await db.ai_users.findBySecondaryIndex( "decadeAge", t1.decadeAge, - ) - result.some((doc) => doc.value.name === t1.name) - result.some((doc) => doc.value.name === t2.name) - }) + ); + result.some((doc) => doc.value.name === t1.name); + result.some((doc) => doc.value.name === t2.name); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/findMany.test.ts b/tests/serialized_indexable_collection/findMany.test.ts index 7c91b56..d61922f 100644 --- a/tests/serialized_indexable_collection/findMany.test.ts +++ b/tests/serialized_indexable_collection/findMany.test.ts @@ -1,32 +1,32 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - findMany", async (t) => { await t.step("Should find all documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const { result: docs } = await db.is_users.getMany() + const { result: docs } = await db.is_users.getMany(); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should not find any documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.findMany(["", "", ""]) - assert(docs.length === 0) - }) - }) -}) + const docs = await 
db.is_users.findMany(["", "", ""]); + assert(docs.length === 0); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/forEach.test.ts b/tests/serialized_indexable_collection/forEach.test.ts index 534f3ee..6d87a96 100644 --- a/tests/serialized_indexable_collection/forEach.test.ts +++ b/tests/serialized_indexable_collection/forEach.test.ts @@ -1,27 +1,27 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - forEach", async (t) => { await t.step( "Should run callback function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs: Document<User>[] = [] - await db.is_users.forEach((doc) => docs.push(doc)) + const docs: Document<User>[] = []; + await db.is_users.forEach((doc) => docs.push(doc)); - assert(docs.length === users.length) + assert(docs.length === users.length); assert( users.every((user) => docs.some((doc) => doc.value.username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/forEachBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/forEachBySecondaryIndex.test.ts index ba28d37..a265368 100644 --- a/tests/serialized_indexable_collection/forEachBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/forEachBySecondaryIndex.test.ts @@ -1,29 +1,29 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - forEachBySecondaryIndex", async (t) => { await t.step( "Should run callback function for each document in the collection by secondary index", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); - const docs: Document<User>[] = [] + const docs: Document<User>[] = []; await db.is_users.forEachBySecondaryIndex( "age", mockUser1.age, (doc) => docs.push(doc), - ) + ); - assert(docs.length === 2) - assert(docs.some((doc) => doc.value.username === mockUser1.username)) - assert(docs.some((doc) => doc.value.username === mockUser2.username)) - assert(!docs.some((doc) => doc.value.username === mockUser3.username)) - }) + assert(docs.length === 2); + assert(docs.some((doc) => doc.value.username === mockUser1.username)); + assert(docs.some((doc) => doc.value.username === mockUser2.username)); + assert(!docs.some((doc) => doc.value.username === mockUser3.username)); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/forEachBySecondaryOrder.test.ts
b/tests/serialized_indexable_collection/forEachBySecondaryOrder.test.ts index 3fdb6ad..498104b 100644 --- a/tests/serialized_indexable_collection/forEachBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/forEachBySecondaryOrder.test.ts @@ -1,32 +1,32 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - forEachBySecondaryOrder", async (t) => { await t.step( "Should run callback function for each document in the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs: Document<User>[] = [] + const docs: Document<User>[] = []; await db.is_users.forEachBySecondaryOrder( "age", (doc) => docs.push(doc), - ) + ); - assert(docs[0].value.username === mockUser3.username) - assert(docs[1].value.username === mockUser1.username) - assert(docs[2].value.username === mockUser2.username) - }) + assert(docs[0].value.username === mockUser3.username); + assert(docs[1].value.username === mockUser1.username); + assert(docs[2].value.username === mockUser2.username); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/getMany.test.ts b/tests/serialized_indexable_collection/getMany.test.ts index e77566f..c200421 100644 --- a/tests/serialized_indexable_collection/getMany.test.ts +++ b/tests/serialized_indexable_collection/getMany.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - getMany", async (t) => { await t.step("Should get all documents", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const { result } = await db.is_users.getMany() - assert(result.length === users.length) + const { result } = await db.is_users.getMany(); + assert(result.length === users.length); assert( users.every((user) => result.some((doc) => doc.value.username === user.username) ), - ) - }) - }) -}) + ); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/getManyBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/getManyBySecondaryOrder.test.ts index 69fcc48..a0b0608 100644 --- a/tests/serialized_indexable_collection/getManyBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/getManyBySecondaryOrder.test.ts @@ -3,21 +3,21 @@ import { mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { assert } from "../test.deps.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import { assert } from "../test.deps.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - getManyBySecondaryOrder", async (t) => { await t.step("Should get all documents by secondary
order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const { result } = await db.is_users.getManyBySecondaryOrder("age") - assert(result.length === mockUsersWithAlteredAge.length) - assert(result[0].value.username === mockUser3.username) - assert(result[1].value.username === mockUser1.username) - assert(result[2].value.username === mockUser2.username) - }) - }) -}) + const { result } = await db.is_users.getManyBySecondaryOrder("age"); + assert(result.length === mockUsersWithAlteredAge.length); + assert(result[0].value.username === mockUser3.username); + assert(result[1].value.username === mockUser1.username); + assert(result[2].value.username === mockUser2.username); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/getOne.test.ts b/tests/serialized_indexable_collection/getOne.test.ts index ba0c87c..0c9bfc0 100644 --- a/tests/serialized_indexable_collection/getOne.test.ts +++ b/tests/serialized_indexable_collection/getOne.test.ts @@ -1,20 +1,20 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("serialized_indexable_collection - getOne", async (t) => { await t.step("Should get only one document", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const doc = await db.is_users.getOne() - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) - }) -}) + const doc = await db.is_users.getOne(); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/getOneBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/getOneBySecondaryIndex.test.ts index 1e4a4aa..ac1466b 100644 --- a/tests/serialized_indexable_collection/getOneBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/getOneBySecondaryIndex.test.ts @@ -1,26 +1,26 @@ -import { assert } from "../test.deps.ts" -import { sleep, useDb } from "../utils.ts" -import { mockUser1, mockUser2 } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { sleep, useDb } from "../utils.ts"; +import { mockUser1, mockUser2 } from "../mocks.ts"; Deno.test("serialized_indexable_collection - getOneBySecondaryIndex", async (t) => { await t.step( "Should get only one document by a secondary index", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const doc = await db.is_users.getOneBySecondaryIndex( "age", mockUser2.age, - ) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - }) + ); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + }); }, - ) -}) + ); +}); diff --git 
a/tests/serialized_indexable_collection/getOneBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/getOneBySecondaryOrder.test.ts index b97ab98..d590c6f 100644 --- a/tests/serialized_indexable_collection/getOneBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/getOneBySecondaryOrder.test.ts @@ -1,17 +1,17 @@ -import { assert } from "../test.deps.ts" -import { useDb } from "../utils.ts" -import { mockUser3, mockUsersWithAlteredAge } from "../mocks.ts" +import { assert } from "../test.deps.ts"; +import { useDb } from "../utils.ts"; +import { mockUser3, mockUsersWithAlteredAge } from "../mocks.ts"; Deno.test("serialized_indexable_collection - getOneBySecondaryOrder", async (t) => { await t.step("Should get only one document by secondary order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); - assert(cr.ok) + assert(cr.ok); - const doc = await db.is_users.getOneBySecondaryOrder("age") - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - }) - }) -}) + const doc = await db.is_users.getOneBySecondaryOrder("age"); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/history.test.ts b/tests/serialized_indexable_collection/history.test.ts index a59afc6..da547d3 100644 --- a/tests/serialized_indexable_collection/history.test.ts +++ b/tests/serialized_indexable_collection/history.test.ts @@ -1,8 +1,8 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { generateLargeUsers, sleep, useKv } from "../utils.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, sleep, useKv } from "../utils.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_indexable_collection - history", async (t) => { await t.step( @@ -18,27 +18,27 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username === mockUser2.username); + assert(h2.timestamp.valueOf() <= 
h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts in correct order after deleting", @@ -53,35 +53,35 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.delete(id) - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.set(id, mockUser3, { overwrite: true }) - await sleep(10) - await db.users.delete(id) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.delete(id); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.set(id, mockUser3, { overwrite: true }); + await sleep(10); + await db.users.delete(id); - const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "write") - assert(h4.value.username === mockUser3.username) - assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()) - assert(h5.type === "delete") - }) + const { result: [h1, h2, h3, h4, h5] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "write"); + assert(h4.value.username === mockUser3.username); + assert(h4.timestamp.valueOf() <= h5.timestamp.valueOf()); + assert(h5.type === "delete"); + }); }, - ) + ); await t.step( "Should persist history of multiple inserts and updates in correct order", @@ -96,27 +96,27 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.update(id, mockUser2) - await sleep(10) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.update(id, mockUser2); + await sleep(10); + await db.users.update(id, mockUser3); - const { result: [h1, h2, h3] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "write") - assert(h2.value.username === mockUser2.username) - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser3.username) - }) + const { result: [h1, h2, h3] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "write"); + assert(h2.value.username 
=== mockUser2.username); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser3.username); + }); }, - ) + ); await t.step( "Should persist version history of insert and delete by deleteMany()", @@ -131,30 +131,30 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await sleep(10) - await db.users.deleteMany() - await sleep(10) - await db.users.set(id, mockUser2, { overwrite: true }) - await sleep(10) - await db.users.deleteMany({ filter: () => true }) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await sleep(10); + await db.users.deleteMany(); + await sleep(10); + await db.users.set(id, mockUser2, { overwrite: true }); + await sleep(10); + await db.users.deleteMany({ filter: () => true }); - const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id) - assert(h1.type === "write") - assert(h1.value.username === mockUser1.username) - assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()) - assert(h2.type === "delete") - assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()) - assert(h3.type === "write") - assert(h3.value.username === mockUser2.username) - assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()) - assert(h4.type === "delete") - }) + const { result: [h1, h2, h3, h4] } = await db.users.findHistory(id); + assert(h1.type === "write"); + assert(h1.value.username === mockUser1.username); + assert(h1.timestamp.valueOf() <= h2.timestamp.valueOf()); + assert(h2.type === "delete"); + assert(h2.timestamp.valueOf() <= h3.timestamp.valueOf()); + assert(h3.type === "write"); + assert(h3.value.username === mockUser2.username); + assert(h3.timestamp.valueOf() <= h4.timestamp.valueOf()); + assert(h4.type === "delete"); + }); }, - ) + ); await t.step( "Should not find history", @@ -168,19 +168,19 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.update(id, mockUser2) - await db.users.delete(id) - await db.users.deleteMany() + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.update(id, mockUser2); + await db.users.delete(id); + await db.users.deleteMany(); - const { result: history } = await db.users.findHistory(id) - assert(history.length === 0) - }) + const { result: history } = await db.users.findHistory(id); + assert(history.length === 0); + }); }, - ) + ); await t.step("Should find filtered history", async () => { await useKv(async (kv) => { @@ -193,39 +193,39 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.delete(id) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.update(id, mockUser3) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.delete(id); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.update(id, mockUser3); const { result: history1 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "delete", - }) + }); const { result: history2 } = await db.users.findHistory(id, { filter: (entry) => entry.type === "write" && entry.value.age === mockUser1.age, - }) + }); - 
assert(history1.length === 1) - assert(history2.length === 2) + assert(history1.length === 1); + assert(history2.length === 2); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser1.username ), - ) + ); assert( history2.some((h) => h.type === "write" && h.value.username === mockUser2.username ), - ) - }) - }) + ); + }); + }); await t.step("Should delete all document history", async () => { await useKv(async (kv) => { @@ -238,27 +238,27 @@ Deno.test("serialized_indexable_collection - history", async (t) => { age: "secondary", }, }), - }) + }); - const id = "id" - await db.users.set(id, mockUser1, { overwrite: true }) - await db.users.set(id, mockUser2, { overwrite: true }) - await db.users.set(id, mockUser3, { overwrite: true }) - const cr = await db.users.add(generateLargeUsers(1)[0]) + const id = "id"; + await db.users.set(id, mockUser1, { overwrite: true }); + await db.users.set(id, mockUser2, { overwrite: true }); + await db.users.set(id, mockUser3, { overwrite: true }); + const cr = await db.users.add(generateLargeUsers(1)[0]); - assert(cr.ok) + assert(cr.ok); - const { result: history1_1 } = await db.users.findHistory(id) - const { result: history1_2 } = await db.users.findHistory(cr.id) - assert(history1_1.length === 3) - assert(history1_2.length === 1) + const { result: history1_1 } = await db.users.findHistory(id); + const { result: history1_2 } = await db.users.findHistory(cr.id); + assert(history1_1.length === 3); + assert(history1_2.length === 1); - await db.users.deleteHistory(id) + await db.users.deleteHistory(id); - const { result: history2_1 } = await db.users.findHistory(id) - const { result: history2_2 } = await db.users.findHistory(cr.id) - assert(history2_1.length === 0) - assert(history2_2.length === 1) - }) - }) -}) + const { result: history2_1 } = await db.users.findHistory(id); + const { result: history2_2 } = await db.users.findHistory(cr.id); + assert(history2_1.length === 0); + assert(history2_2.length === 1); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/listenQueue.test.ts b/tests/serialized_indexable_collection/listenQueue.test.ts index 70cba2c..8a571a4 100644 --- a/tests/serialized_indexable_collection/listenQueue.test.ts +++ b/tests/serialized_indexable_collection/listenQueue.test.ts @@ -4,41 +4,41 @@ import { type KvValue, model, type QueueMessage, -} from "../../mod.ts" +} from "../../mod.ts"; import { KVDEX_KEY_PREFIX, UNDELIVERED_KEY_PREFIX, -} from "../../src/constants.ts" -import { createHandlerId, extendKey } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { createResolver, sleep, useKv } from "../utils.ts" +} from "../../src/constants.ts"; +import { createHandlerId, extendKey } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { createResolver, sleep, useKv } from "../utils.ts"; Deno.test("serialized_indexable_collection - listenQueue", async (t) => { await t.step("Should receive message with string data", async () => { await useKv(async (kv) => { - const data = "data" - const undeliveredId = "id" - const sleeper = createResolver() + const data = "data"; + const undeliveredId = "id"; + const sleeper = createResolver(); const db = kvdex(kv, { is_users: collection(model(), { indices: {}, serialize: "json" }), - }) + }); - const handlerId = createHandlerId(db.is_users._keys.base, undefined) + const handlerId = createHandlerId(db.is_users._keys.base, undefined); - let 
assertion = false + let assertion = false; const listener = db.is_users.listenQueue((msgData) => { - assertion = msgData === data - sleeper.resolve() - }) + assertion = msgData === data; + sleeper.resolve(); + }); const msg: QueueMessage = { __is_undefined__: false, __handlerId__: handlerId, __data__: data, - } + }; await kv.enqueue(msg, { keysIfUndelivered: [ @@ -48,36 +48,36 @@ Deno.test("serialized_indexable_collection - listenQueue", async (t) => { undeliveredId, ), ], - }) + }); - await sleeper.promise + await sleeper.promise; - const undelivered = await db.is_users.findUndelivered(undeliveredId) - assert(assertion || typeof undelivered?.value === typeof data) + const undelivered = await db.is_users.findUndelivered(undeliveredId); + assert(assertion || typeof undelivered?.value === typeof data); - return async () => await listener - }) - }) + return async () => await listener; + }); + }); await t.step("Should not receive db queue message", async () => { await useKv(async (kv) => { const db = kvdex(kv, { is_users: collection(model(), { indices: {}, serialize: "json" }), - }) + }); - let assertion = true + let assertion = true; const listener = db.is_users.listenQueue(() => { - assertion = false - }) + assertion = false; + }); - await db.enqueue("data") + await db.enqueue("data"); - await sleep(500) + await sleep(500); - assert(assertion) + assert(assertion); - return async () => await listener - }) - }) -}) + return async () => await listener; + }); + }); +}); diff --git a/tests/serialized_indexable_collection/map.test.ts b/tests/serialized_indexable_collection/map.test.ts index 639389d..46ee615 100644 --- a/tests/serialized_indexable_collection/map.test.ts +++ b/tests/serialized_indexable_collection/map.test.ts @@ -1,24 +1,24 @@ -import { assert } from "../test.deps.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - map", async (t) => { await t.step( "Should run callback mapper function for each document in the collection", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const { result } = await db.is_users.map((doc) => doc.value.username) + const { result } = await db.is_users.map((doc) => doc.value.username); - assert(result.length === users.length) + assert(result.length === users.length); assert( users.every((user) => result.some((username) => username === user.username) ), - ) - }) + ); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/mapBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/mapBySecondaryIndex.test.ts index f468723..0384ae2 100644 --- a/tests/serialized_indexable_collection/mapBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/mapBySecondaryIndex.test.ts @@ -1,26 +1,26 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - mapBySecondaryIndex", async (t) => { await t.step( "Should run callback mapper function for each document in the collection by secondary index", async () 
=> { await useDb(async (db) => { - const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]) - assert(cr.ok) + const cr = await db.is_users.addMany([mockUser1, mockUser2, mockUser3]); + assert(cr.ok); const { result } = await db.is_users.mapBySecondaryIndex( "age", mockUser1.age, (doc) => doc.value.username, - ) + ); - assert(result.length === 2) - assert(result.some((username) => username === mockUser1.username)) - assert(result.some((username) => username === mockUser2.username)) - assert(!result.some((username) => username === mockUser3.username)) - }) + assert(result.length === 2); + assert(result.some((username) => username === mockUser1.username)); + assert(result.some((username) => username === mockUser2.username)); + assert(!result.some((username) => username === mockUser3.username)); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/mapBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/mapBySecondaryOrder.test.ts index 6d661aa..5d38eac 100644 --- a/tests/serialized_indexable_collection/mapBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/mapBySecondaryOrder.test.ts @@ -1,29 +1,29 @@ -import { assert } from "../test.deps.ts" +import { assert } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUser3, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { useDb } from "../utils.ts" +} from "../mocks.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - mapBySecondaryOrder", async (t) => { await t.step( "Should run callback mapper function for each document in the collection by secondary order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const { result } = await db.is_users.mapBySecondaryOrder( "age", (doc) => doc.value.username, - ) + ); - assert(result[0] === mockUser3.username) - assert(result[1] === mockUser1.username) - assert(result[2] === mockUser2.username) - }) + assert(result[0] === mockUser3.username); + assert(result[1] === mockUser1.username); + assert(result[2] === mockUser2.username); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/properties.test.ts b/tests/serialized_indexable_collection/properties.test.ts index 1c63e87..bd89c94 100644 --- a/tests/serialized_indexable_collection/properties.test.ts +++ b/tests/serialized_indexable_collection/properties.test.ts @@ -4,43 +4,43 @@ import { kvdex, type KvValue, model, -} from "../../mod.ts" +} from "../../mod.ts"; import { ID_KEY_PREFIX, KVDEX_KEY_PREFIX, PRIMARY_INDEX_KEY_PREFIX, SECONDARY_INDEX_KEY_PREFIX, -} from "../../src/constants.ts" -import { extendKey, keyEq } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import type { User } from "../models.ts" -import { generateLargeUsers, useDb, useKv } from "../utils.ts" -import { mockUser1 } from "../mocks.ts" -import { mockUser2 } from "../mocks.ts" -import { mockUser3 } from "../mocks.ts" -import { sleep } from "../utils.ts" - -const [user] = generateLargeUsers(1) +} from "../../src/constants.ts"; +import { extendKey, keyEq } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import type { User } from "../models.ts"; +import { generateLargeUsers, useDb, useKv } from "../utils.ts"; +import { mockUser1 } from "../mocks.ts"; +import { mockUser2 } from "../mocks.ts"; +import { mockUser3 } from "../mocks.ts"; +import { sleep } from 
"../utils.ts"; + +const [user] = generateLargeUsers(1); Deno.test("serialized_indexable_collection - properties", async (t) => { await t.step("Keys should have the correct prefixes", async () => { await useDb((db) => { - const baseKey = db.is_users._keys.base - const idKey = db.is_users._keys.id - const primaryIndexKey = db.is_users._keys.primaryIndex - const secondaryIndexKey = db.is_users._keys.secondaryIndex - const prefix = extendKey([KVDEX_KEY_PREFIX], "is_users") - - assert(keyEq(baseKey, prefix)) - assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))) + const baseKey = db.is_users._keys.base; + const idKey = db.is_users._keys.id; + const primaryIndexKey = db.is_users._keys.primaryIndex; + const secondaryIndexKey = db.is_users._keys.secondaryIndex; + const prefix = extendKey([KVDEX_KEY_PREFIX], "is_users"); + + assert(keyEq(baseKey, prefix)); + assert(keyEq(idKey, extendKey(prefix, ID_KEY_PREFIX))); assert( keyEq(primaryIndexKey, extendKey(prefix, PRIMARY_INDEX_KEY_PREFIX)), - ) + ); assert( keyEq(secondaryIndexKey, extendKey(prefix, SECONDARY_INDEX_KEY_PREFIX)), - ) - }) - }) + ); + }); + }); await t.step("Should generate ids with custom id generator", async () => { await useKv((kv) => { @@ -55,196 +55,196 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { indices: {}, serialize: "json", }), - }) + }); - const id1 = db.users1._idGenerator(user) - const id2 = db.users2._idGenerator(user) + const id1 = db.users1._idGenerator(user); + const id2 = db.users2._idGenerator(user); - assert(typeof id1 === "number") - assert(id2 === user.username) - }) - }) + assert(typeof id1 === "number"); + assert(id2 === user.username); + }); + }); await t.step("Should select using cursor pagination", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - let cursor: string | undefined = undefined + const selected: Document[] = []; + let cursor: string | undefined = undefined; do { const query = await db.is_users.getMany({ cursor, limit: users.length / 10, - }) + }); - selected.push(...query.result) - cursor = query.cursor - } while (cursor) + selected.push(...query.result); + cursor = query.cursor; + } while (cursor); assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select using offset pagination", async () => { await useDb(async (db) => { - const users = generateLargeUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateLargeUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const selected: Document[] = [] - const limit = 50 + const selected: Document[] = []; + const limit = 50; for (let offset = 0; offset < users.length; offset += limit) { - const { result } = await db.is_users.getMany({ offset, limit }) - selected.push(...result) - assert(result.length === 50) + const { result } = await db.is_users.getMany({ offset, limit }); + selected.push(...result); + assert(result.length === 50); } assert( users.every((user) => selected.some((doc) => doc.value.username === user.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select filtered", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - 
const count1 = await db.is_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const sliced = users.slice(5, 7) + const sliced = users.slice(5, 7); const { result } = await db.is_users.getMany({ filter: (doc) => sliced.map((user) => user.username).includes( doc.value.username, ), - }) + }); - assert(result.length === sliced.length) + assert(result.length === sliced.length); assert( result.every((doc) => sliced.some((user) => user.username === doc.value.username) ), - ) - }) - }) + ); + }); + }); await t.step("Should select in reverse", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - const count1 = await db.is_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const query1 = await db.is_users.getMany() - const query2 = await db.is_users.getMany({ reverse: true }) + const query1 = await db.is_users.getMany(); + const query2 = await db.is_users.getMany({ reverse: true }); assert( JSON.stringify(query1.result) === JSON.stringify(query2.result.reverse()), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - const count1 = await db.is_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.is_users.getMany() + const query1 = await db.is_users.getMany(); const query2 = await db.is_users.getMany({ startId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(index).length) + assert(query2.result.length === query1.result.slice(index).length); assert( query2.result.every((doc1) => query1.result.slice(index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select until end id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - const count1 = await db.is_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index = 5 + const index = 5; - const query1 = await db.is_users.getMany() + const query1 = await db.is_users.getMany(); const query2 = await db.is_users.getMany({ endId: query1.result.at(index)?.id, - }) + }); - assert(query2.result.length === query1.result.slice(0, index).length) + assert(query2.result.length === query1.result.slice(0, index).length); assert( query2.result.every((doc1) => query1.result.slice(0, index).some((doc2) => doc1.id === doc2.id) ), - ) - }) - }) + ); + }); + }); await t.step("Should select from start id to end id", async () => { await useDb(async (db) => { - const users = generateLargeUsers(10) - const cr = await db.is_users.addMany(users) - const 
count1 = await db.is_users.count() - assert(cr.ok) - assert(count1 === users.length) + const users = generateLargeUsers(10); + const cr = await db.is_users.addMany(users); + const count1 = await db.is_users.count(); + assert(cr.ok); + assert(count1 === users.length); - const index1 = 5 - const index2 = 7 + const index1 = 5; + const index2 = 7; - const query1 = await db.is_users.getMany() + const query1 = await db.is_users.getMany(); const query2 = await db.is_users.getMany({ startId: query1.result.at(index1)?.id, endId: query1.result.at(index2)?.id, - }) + }); assert( query2.result.length === query1.result.slice(index1, index2).length, - ) + ); assert( query2.result.every((doc1) => query1.result.slice(index1, index2).some((doc2) => doc1.id === doc2.id ) ), - ) - }) - }) + ); + }); + }); await t.step("Should allow optional indices", async () => { await useKv(async (kv) => { const db = kvdex(kv, { is: collection( model<{ - oblPrimary: string - oblSecondary: number - optPrimary?: string - optSecondary?: number - check?: Date + oblPrimary: string; + oblSecondary: number; + optPrimary?: string; + optSecondary?: number; + check?: Date; }>(), { indices: { @@ -256,168 +256,168 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { serialize: "json", }, ), - }) + }); const cr1 = await db.is.add({ oblPrimary: "oblPrimary1", oblSecondary: 10, - }) + }); const cr2 = await db.is.add({ oblPrimary: "oblPrimary2", oblSecondary: 10, optPrimary: "optPrimary2", optSecondary: 20, - }) + }); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const byOptPrimary2 = await db.is.findByPrimaryIndex( "optPrimary", "optPrimary2", - ) + ); const byOptSecondary2 = await db.is.findBySecondaryIndex( "optSecondary", 20, - ) + ); - assert(byOptPrimary2?.id === cr2.id) - assert(byOptSecondary2.result.length === 1) - assert(byOptSecondary2.result.some((i) => i.id === cr2.id)) + assert(byOptPrimary2?.id === cr2.id); + assert(byOptSecondary2.result.length === 1); + assert(byOptSecondary2.result.some((i) => i.id === cr2.id)); const cr3 = await db.is.add({ oblPrimary: "oblPrimary3", oblSecondary: 10, optPrimary: "optPrimary2", optSecondary: 20, - }) + }); - assert(!cr3.ok) + assert(!cr3.ok); const cr4 = await db.is.add({ oblPrimary: "oblPrimary4", oblSecondary: 10, optPrimary: "optPrimary4", optSecondary: 20, - }) + }); - assert(cr4.ok) + assert(cr4.ok); const byOptPrimary4 = await db.is.findByPrimaryIndex( "optPrimary", "optPrimary4", - ) + ); const byOptSecondary4 = await db.is.findBySecondaryIndex( "optSecondary", 20, - ) + ); - assert(byOptPrimary4?.id === cr4.id) - assert(byOptSecondary4.result.length === 2) - assert(byOptSecondary4.result.some((i) => i.id === cr2.id)) - assert(byOptSecondary4.result.some((i) => i.id === cr4.id)) - }) - }) + assert(byOptPrimary4?.id === cr4.id); + assert(byOptSecondary4.result.length === 2); + assert(byOptSecondary4.result.some((i) => i.id === cr2.id)); + assert(byOptSecondary4.result.some((i) => i.id === cr4.id)); + }); + }); await t.step("Should select limited by database reads", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) - await sleep(10) - const cr3 = await db.is_users.add(mockUser3) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); + await sleep(10); + const cr3 = await db.is_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + 
assert(cr2.ok); + assert(cr3.ok); const { result } = await db.is_users.getMany({ limit: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.every((doc) => doc.value.username === mockUser2.username)) - }) - }) + assert(result.every((doc) => doc.value.username === mockUser2.username)); + }); + }); await t.step("Should select limited by result count", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) - await sleep(10) - const cr3 = await db.is_users.add(mockUser3) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); + await sleep(10); + const cr3 = await db.is_users.add(mockUser3); - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); const { result } = await db.is_users.getMany({ take: 2, filter: (doc) => doc.value.username !== mockUser1.username, - }) + }); - assert(result.length === 2) - assert(result.some((doc) => doc.value.username === mockUser2.username)) - assert(result.some((doc) => doc.value.username === mockUser3.username)) - }) - }) + assert(result.length === 2); + assert(result.some((doc) => doc.value.username === mockUser2.username)); + assert(result.some((doc) => doc.value.username === mockUser3.username)); + }); + }); await t.step("Should correctly infer type of document", async () => { await useDb(async (db) => { - const doc = await db.is_users.find("") + const doc = await db.is_users.find(""); if (doc) { - doc.value.age.valueOf() + doc.value.age.valueOf(); } - }) - }) + }); + }); await t.step( "Should correctly infer insert and output of asymmetric model", async () => { await useDb(async (db) => { - const cr = await db.ais_users.add(user) - assert(cr.ok) - - const doc = await db.ais_users.find(cr.id) - assert(doc !== null) - assert(typeof doc.value.addressStr === "string") - assert(typeof doc.value.decadeAge === "number") - assert(typeof doc.value.name === "string") - }) + const cr = await db.ais_users.add(user); + assert(cr.ok); + + const doc = await db.ais_users.find(cr.id); + assert(doc !== null); + assert(typeof doc.value.addressStr === "string"); + assert(typeof doc.value.decadeAge === "number"); + assert(typeof doc.value.name === "string"); + }); }, - ) + ); await t.step("Should enable indexing using all available types", async () => { type Data = { - p: KvValue - s: KvValue - } + p: KvValue; + s: KvValue; + }; await useKv(async (kv) => { - const val1 = undefined - const val2 = null - const val3 = 10 - const val4 = "string" - const val5 = 10n - const val6 = true - const val7 = new Int8Array([10, 20, 30]) - const val8 = new Int16Array([10, 20, 30]) - const val9 = new Int32Array([10, 20, 30]) - const val10 = new BigInt64Array([10n, 20n, 30n]) - const val11 = new Uint8Array([10, 20, 30]) - const val12 = new Uint16Array([10, 20, 30]) - const val13 = new Uint32Array([10, 20, 30]) - const val14 = new BigUint64Array([10n, 20n, 30n]) - const val15 = new Uint8ClampedArray([10, 20, 30]) - const val16 = new Float32Array([10.203423878293472837429384]) - const val17 = new Float64Array([10.203423878293472837429384]) - const val18 = new Uint8Array([10, 20, 30]).buffer - const val19 = new Date() - const val20 = new RegExp("[0-9]") - const val21 = new DataView(new Uint8Array([10, 20, 30]).buffer) - const val22 = new Error("error") + const val1 = undefined; + const val2 = null; + const val3 = 10; + const val4 = "string"; + const val5 = 
10n; + const val6 = true; + const val7 = new Int8Array([10, 20, 30]); + const val8 = new Int16Array([10, 20, 30]); + const val9 = new Int32Array([10, 20, 30]); + const val10 = new BigInt64Array([10n, 20n, 30n]); + const val11 = new Uint8Array([10, 20, 30]); + const val12 = new Uint16Array([10, 20, 30]); + const val13 = new Uint32Array([10, 20, 30]); + const val14 = new BigUint64Array([10n, 20n, 30n]); + const val15 = new Uint8ClampedArray([10, 20, 30]); + const val16 = new Float32Array([10.203423878293472837429384]); + const val17 = new Float64Array([10.203423878293472837429384]); + const val18 = new Uint8Array([10, 20, 30]).buffer; + const val19 = new Date(); + const val20 = new RegExp("[0-9]"); + const val21 = new DataView(new Uint8Array([10, 20, 30]).buffer); + const val22 = new Error("error"); const val23 = [ val1, val2, @@ -441,7 +441,7 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { val20, val21, val22, - ] + ]; const val24 = { val1, val2, @@ -466,8 +466,8 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { val21, val22, val23, - } - const val25 = new Set(val23) + }; + const val25 = new Set(val23); const val26 = new Map([ ["val1", val1], ["val2", val2], @@ -491,7 +491,7 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { ["val20", val20], ["val21", val21], ["val22", val22], - ]) + ]); const db = kvdex(kv, { val1: collection(model(), { @@ -676,115 +676,115 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { s: "secondary", }, }), - }) - - const cr1 = await db.val1.add({ p: val1, s: val1 }) - const cr2 = await db.val2.add({ p: val2, s: val2 }) - const cr3 = await db.val3.add({ p: val3, s: val3 }) - const cr4 = await db.val4.add({ p: val4, s: val4 }) - const cr5 = await db.val5.add({ p: val5, s: val5 }) - const cr6 = await db.val6.add({ p: val6, s: val6 }) - const cr7 = await db.val7.add({ p: val7, s: val7 }) - const cr8 = await db.val8.add({ p: val8, s: val8 }) - const cr9 = await db.val9.add({ p: val9, s: val9 }) - const cr10 = await db.val10.add({ p: val10, s: val10 }) - const cr11 = await db.val11.add({ p: val11, s: val11 }) - const cr12 = await db.val12.add({ p: val12, s: val12 }) - const cr13 = await db.val13.add({ p: val13, s: val13 }) - const cr14 = await db.val14.add({ p: val14, s: val14 }) - const cr15 = await db.val15.add({ p: val15, s: val15 }) - const cr16 = await db.val16.add({ p: val16, s: val16 }) - const cr17 = await db.val17.add({ p: val17, s: val17 }) - const cr18 = await db.val18.add({ p: val18, s: val18 }) - const cr19 = await db.val19.add({ p: val19, s: val19 }) - const cr20 = await db.val20.add({ p: val20, s: val20 }) - const cr21 = await db.val21.add({ p: val21, s: val21 }) - const cr22 = await db.val22.add({ p: val22, s: val22 }) - const cr23 = await db.val23.add({ p: val23, s: val23 }) - const cr24 = await db.val24.add({ p: val24, s: val24 }) - const cr25 = await db.val25.add({ p: val25, s: val25 }) - const cr26 = await db.val26.add({ p: val26, s: val26 }) - - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) - assert(cr4.ok) - assert(cr5.ok) - assert(cr6.ok) - assert(cr7.ok) - assert(cr8.ok) - assert(cr9.ok) - assert(cr10.ok) - assert(cr11.ok) - assert(cr12.ok) - assert(cr13.ok) - assert(cr14.ok) - assert(cr15.ok) - assert(cr16.ok) - assert(cr17.ok) - assert(cr18.ok) - assert(cr19.ok) - assert(cr20.ok) - assert(cr21.ok) - assert(cr22.ok) - assert(cr23.ok) - assert(cr24.ok) - assert(cr25.ok) - assert(cr26.ok) + }); + + const cr1 = await db.val1.add({ p: val1, s: 
val1 }); + const cr2 = await db.val2.add({ p: val2, s: val2 }); + const cr3 = await db.val3.add({ p: val3, s: val3 }); + const cr4 = await db.val4.add({ p: val4, s: val4 }); + const cr5 = await db.val5.add({ p: val5, s: val5 }); + const cr6 = await db.val6.add({ p: val6, s: val6 }); + const cr7 = await db.val7.add({ p: val7, s: val7 }); + const cr8 = await db.val8.add({ p: val8, s: val8 }); + const cr9 = await db.val9.add({ p: val9, s: val9 }); + const cr10 = await db.val10.add({ p: val10, s: val10 }); + const cr11 = await db.val11.add({ p: val11, s: val11 }); + const cr12 = await db.val12.add({ p: val12, s: val12 }); + const cr13 = await db.val13.add({ p: val13, s: val13 }); + const cr14 = await db.val14.add({ p: val14, s: val14 }); + const cr15 = await db.val15.add({ p: val15, s: val15 }); + const cr16 = await db.val16.add({ p: val16, s: val16 }); + const cr17 = await db.val17.add({ p: val17, s: val17 }); + const cr18 = await db.val18.add({ p: val18, s: val18 }); + const cr19 = await db.val19.add({ p: val19, s: val19 }); + const cr20 = await db.val20.add({ p: val20, s: val20 }); + const cr21 = await db.val21.add({ p: val21, s: val21 }); + const cr22 = await db.val22.add({ p: val22, s: val22 }); + const cr23 = await db.val23.add({ p: val23, s: val23 }); + const cr24 = await db.val24.add({ p: val24, s: val24 }); + const cr25 = await db.val25.add({ p: val25, s: val25 }); + const cr26 = await db.val26.add({ p: val26, s: val26 }); + + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); + assert(cr4.ok); + assert(cr5.ok); + assert(cr6.ok); + assert(cr7.ok); + assert(cr8.ok); + assert(cr9.ok); + assert(cr10.ok); + assert(cr11.ok); + assert(cr12.ok); + assert(cr13.ok); + assert(cr14.ok); + assert(cr15.ok); + assert(cr16.ok); + assert(cr17.ok); + assert(cr18.ok); + assert(cr19.ok); + assert(cr20.ok); + assert(cr21.ok); + assert(cr22.ok); + assert(cr23.ok); + assert(cr24.ok); + assert(cr25.ok); + assert(cr26.ok); //const byPrimary1 = await db.val1.findByPrimaryIndex("p", val1) - const byPrimary2 = await db.val2.findByPrimaryIndex("p", val2) - const byPrimary3 = await db.val3.findByPrimaryIndex("p", val3) - const byPrimary4 = await db.val4.findByPrimaryIndex("p", val4) - const byPrimary5 = await db.val5.findByPrimaryIndex("p", val5) - const byPrimary6 = await db.val6.findByPrimaryIndex("p", val6) - const byPrimary7 = await db.val7.findByPrimaryIndex("p", val7) - const byPrimary8 = await db.val8.findByPrimaryIndex("p", val8) - const byPrimary9 = await db.val9.findByPrimaryIndex("p", val9) - const byPrimary10 = await db.val10.findByPrimaryIndex("p", val10) - const byPrimary11 = await db.val11.findByPrimaryIndex("p", val11) - const byPrimary12 = await db.val12.findByPrimaryIndex("p", val12) - const byPrimary13 = await db.val13.findByPrimaryIndex("p", val13) - const byPrimary14 = await db.val14.findByPrimaryIndex("p", val14) - const byPrimary15 = await db.val15.findByPrimaryIndex("p", val15) - const byPrimary16 = await db.val16.findByPrimaryIndex("p", val16) - const byPrimary17 = await db.val17.findByPrimaryIndex("p", val17) - const byPrimary18 = await db.val18.findByPrimaryIndex("p", val18) - const byPrimary19 = await db.val19.findByPrimaryIndex("p", val19) - const byPrimary20 = await db.val20.findByPrimaryIndex("p", val20) - const byPrimary21 = await db.val21.findByPrimaryIndex("p", val21) - const byPrimary22 = await db.val22.findByPrimaryIndex("p", val22) - const byPrimary23 = await db.val23.findByPrimaryIndex("p", val23) - const byPrimary24 = await db.val24.findByPrimaryIndex("p", val24) - const 
byPrimary25 = await db.val25.findByPrimaryIndex("p", val25) - const byPrimary26 = await db.val26.findByPrimaryIndex("p", val26) + const byPrimary2 = await db.val2.findByPrimaryIndex("p", val2); + const byPrimary3 = await db.val3.findByPrimaryIndex("p", val3); + const byPrimary4 = await db.val4.findByPrimaryIndex("p", val4); + const byPrimary5 = await db.val5.findByPrimaryIndex("p", val5); + const byPrimary6 = await db.val6.findByPrimaryIndex("p", val6); + const byPrimary7 = await db.val7.findByPrimaryIndex("p", val7); + const byPrimary8 = await db.val8.findByPrimaryIndex("p", val8); + const byPrimary9 = await db.val9.findByPrimaryIndex("p", val9); + const byPrimary10 = await db.val10.findByPrimaryIndex("p", val10); + const byPrimary11 = await db.val11.findByPrimaryIndex("p", val11); + const byPrimary12 = await db.val12.findByPrimaryIndex("p", val12); + const byPrimary13 = await db.val13.findByPrimaryIndex("p", val13); + const byPrimary14 = await db.val14.findByPrimaryIndex("p", val14); + const byPrimary15 = await db.val15.findByPrimaryIndex("p", val15); + const byPrimary16 = await db.val16.findByPrimaryIndex("p", val16); + const byPrimary17 = await db.val17.findByPrimaryIndex("p", val17); + const byPrimary18 = await db.val18.findByPrimaryIndex("p", val18); + const byPrimary19 = await db.val19.findByPrimaryIndex("p", val19); + const byPrimary20 = await db.val20.findByPrimaryIndex("p", val20); + const byPrimary21 = await db.val21.findByPrimaryIndex("p", val21); + const byPrimary22 = await db.val22.findByPrimaryIndex("p", val22); + const byPrimary23 = await db.val23.findByPrimaryIndex("p", val23); + const byPrimary24 = await db.val24.findByPrimaryIndex("p", val24); + const byPrimary25 = await db.val25.findByPrimaryIndex("p", val25); + const byPrimary26 = await db.val26.findByPrimaryIndex("p", val26); //assert(byPrimary1 !== null) - assert(byPrimary2 !== null) - assert(byPrimary3 !== null) - assert(byPrimary4 !== null) - assert(byPrimary5 !== null) - assert(byPrimary6 !== null) - assert(byPrimary7 !== null) - assert(byPrimary8 !== null) - assert(byPrimary9 !== null) - assert(byPrimary10 !== null) - assert(byPrimary11 !== null) - assert(byPrimary12 !== null) - assert(byPrimary13 !== null) - assert(byPrimary14 !== null) - assert(byPrimary15 !== null) - assert(byPrimary16 !== null) - assert(byPrimary17 !== null) - assert(byPrimary18 !== null) - assert(byPrimary19 !== null) - assert(byPrimary20 !== null) - assert(byPrimary21 !== null) - assert(byPrimary22 !== null) - assert(byPrimary23 !== null) - assert(byPrimary24 !== null) - assert(byPrimary25 !== null) - assert(byPrimary26 !== null) + assert(byPrimary2 !== null); + assert(byPrimary3 !== null); + assert(byPrimary4 !== null); + assert(byPrimary5 !== null); + assert(byPrimary6 !== null); + assert(byPrimary7 !== null); + assert(byPrimary8 !== null); + assert(byPrimary9 !== null); + assert(byPrimary10 !== null); + assert(byPrimary11 !== null); + assert(byPrimary12 !== null); + assert(byPrimary13 !== null); + assert(byPrimary14 !== null); + assert(byPrimary15 !== null); + assert(byPrimary16 !== null); + assert(byPrimary17 !== null); + assert(byPrimary18 !== null); + assert(byPrimary19 !== null); + assert(byPrimary20 !== null); + assert(byPrimary21 !== null); + assert(byPrimary22 !== null); + assert(byPrimary23 !== null); + assert(byPrimary24 !== null); + assert(byPrimary25 !== null); + assert(byPrimary26 !== null); /*const { result: bySecondary1 } = await db.val1.findBySecondaryIndex( "s", @@ -794,132 +794,132 @@ 
Deno.test("serialized_indexable_collection - properties", async (t) => { const { result: bySecondary2 } = await db.val2.findBySecondaryIndex( "s", val2, - ) + ); const { result: bySecondary3 } = await db.val3.findBySecondaryIndex( "s", val3, - ) + ); const { result: bySecondary4 } = await db.val4.findBySecondaryIndex( "s", val4, - ) + ); const { result: bySecondary5 } = await db.val5.findBySecondaryIndex( "s", val5, - ) + ); const { result: bySecondary6 } = await db.val6.findBySecondaryIndex( "s", val6, - ) + ); const { result: bySecondary7 } = await db.val7.findBySecondaryIndex( "s", val7, - ) + ); const { result: bySecondary8 } = await db.val8.findBySecondaryIndex( "s", val8, - ) + ); const { result: bySecondary9 } = await db.val9.findBySecondaryIndex( "s", val9, - ) + ); const { result: bySecondary10 } = await db.val10.findBySecondaryIndex( "s", val10, - ) + ); const { result: bySecondary11 } = await db.val11.findBySecondaryIndex( "s", val11, - ) + ); const { result: bySecondary12 } = await db.val12.findBySecondaryIndex( "s", val12, - ) + ); const { result: bySecondary13 } = await db.val13.findBySecondaryIndex( "s", val13, - ) + ); const { result: bySecondary14 } = await db.val14.findBySecondaryIndex( "s", val14, - ) + ); const { result: bySecondary15 } = await db.val15.findBySecondaryIndex( "s", val15, - ) + ); const { result: bySecondary16 } = await db.val16.findBySecondaryIndex( "s", val16, - ) + ); const { result: bySecondary17 } = await db.val17.findBySecondaryIndex( "s", val17, - ) + ); const { result: bySecondary18 } = await db.val18.findBySecondaryIndex( "s", val18, - ) + ); const { result: bySecondary19 } = await db.val19.findBySecondaryIndex( "s", val19, - ) + ); const { result: bySecondary20 } = await db.val20.findBySecondaryIndex( "s", val20, - ) + ); const { result: bySecondary21 } = await db.val21.findBySecondaryIndex( "s", val21, - ) + ); const { result: bySecondary22 } = await db.val22.findBySecondaryIndex( "s", val22, - ) + ); const { result: bySecondary23 } = await db.val23.findBySecondaryIndex( "s", val23, - ) + ); const { result: bySecondary24 } = await db.val24.findBySecondaryIndex( "s", val24, - ) + ); const { result: bySecondary25 } = await db.val25.findBySecondaryIndex( "s", val25, - ) + ); const { result: bySecondary26 } = await db.val26.findBySecondaryIndex( "s", val26, - ) + ); //assert(bySecondary1.length === 1) - assert(bySecondary2.length === 1) - assert(bySecondary3.length === 1) - assert(bySecondary4.length === 1) - assert(bySecondary5.length === 1) - assert(bySecondary6.length === 1) - assert(bySecondary7.length === 1) - assert(bySecondary8.length === 1) - assert(bySecondary9.length === 1) - assert(bySecondary10.length === 1) - assert(bySecondary11.length === 1) - assert(bySecondary12.length === 1) - assert(bySecondary13.length === 1) - assert(bySecondary14.length === 1) - assert(bySecondary15.length === 1) - assert(bySecondary16.length === 1) - assert(bySecondary17.length === 1) - assert(bySecondary18.length === 1) - assert(bySecondary19.length === 1) - assert(bySecondary20.length === 1) - assert(bySecondary21.length === 1) - assert(bySecondary22.length === 1) - assert(bySecondary23.length === 1) - assert(bySecondary24.length === 1) - assert(bySecondary25.length === 1) - assert(bySecondary26.length === 1) - }) - }) + assert(bySecondary2.length === 1); + assert(bySecondary3.length === 1); + assert(bySecondary4.length === 1); + assert(bySecondary5.length === 1); + assert(bySecondary6.length === 1); + assert(bySecondary7.length === 1); + 
assert(bySecondary8.length === 1); + assert(bySecondary9.length === 1); + assert(bySecondary10.length === 1); + assert(bySecondary11.length === 1); + assert(bySecondary12.length === 1); + assert(bySecondary13.length === 1); + assert(bySecondary14.length === 1); + assert(bySecondary15.length === 1); + assert(bySecondary16.length === 1); + assert(bySecondary17.length === 1); + assert(bySecondary18.length === 1); + assert(bySecondary19.length === 1); + assert(bySecondary20.length === 1); + assert(bySecondary21.length === 1); + assert(bySecondary22.length === 1); + assert(bySecondary23.length === 1); + assert(bySecondary24.length === 1); + assert(bySecondary25.length === 1); + assert(bySecondary26.length === 1); + }); + }); await t.step("Should successfully generate id asynchronously", async () => { await useKv(async (kv) => { @@ -934,16 +934,16 @@ Deno.test("serialized_indexable_collection - properties", async (t) => { const buffer = await crypto.subtle.digest( "SHA-256", new ArrayBuffer(user.age), - ) - return Math.random() * buffer.byteLength + ); + return Math.random() * buffer.byteLength; }, }), - }) + }); - const cr1 = await db.test.add(mockUser1) + const cr1 = await db.test.add(mockUser1); - assert(cr1.ok) - assert(typeof cr1.id === "number") - }) - }) -}) + assert(cr1.ok); + assert(typeof cr1.id === "number"); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/set.test.ts b/tests/serialized_indexable_collection/set.test.ts index af65431..b2133a5 100644 --- a/tests/serialized_indexable_collection/set.test.ts +++ b/tests/serialized_indexable_collection/set.test.ts @@ -1,122 +1,122 @@ -import { assert } from "../test.deps.ts" -import { mockUserInvalid } from "../mocks.ts" -import { generateLargeUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUserInvalid } from "../mocks.ts"; +import { generateLargeUsers, useDb } from "../utils.ts"; -const [user1, user2] = generateLargeUsers(2) +const [user1, user2] = generateLargeUsers(2); Deno.test("serialized_indexable_collection - set", async (t) => { await t.step("Should set new document entry in collection", async () => { await useDb(async (db) => { - const cr = await db.is_users.set("id", user1) - assert(cr.ok) + const cr = await db.is_users.set("id", user1); + assert(cr.ok); - const doc = await db.is_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === user1.username) - }) - }) + const doc = await db.is_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === user1.username); + }); + }); await t.step( "Should not set new document entry in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.set("id", user1) - assert(cr1.ok) + const cr1 = await db.is_users.set("id", user1); + assert(cr1.ok); - const cr2 = await db.is_users.set("id", user2) - assert(!cr2.ok) + const cr2 = await db.is_users.set("id", user2); + assert(!cr2.ok); - const doc = await db.is_users.find("id") - assert(doc !== null) - assert(doc.value.username === user1.username) - }) + const doc = await db.is_users.find("id"); + assert(doc !== null); + assert(doc.value.username === user1.username); + }); }, - ) + ); await t.step( "Should not set new document entry in collection with primary index", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.set("id1", user1) - assert(cr1.ok) + const cr1 = await db.is_users.set("id1", user1); + assert(cr1.ok); - const cr2 = await db.is_users.set("id2", user1) - assert(!cr2.ok) + 
const cr2 = await db.is_users.set("id2", user1); + assert(!cr2.ok); const byPrimary = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(byPrimary?.id === cr1.id) - assert(bySecondary.result.length === 1) - }) + assert(byPrimary?.id === cr1.id); + assert(bySecondary.result.length === 1); + }); }, - ) + ); await t.step( "Should overwrite document in collection with colliding id", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.set("id", user1) - assert(cr1.ok) + const cr1 = await db.is_users.set("id", user1); + assert(cr1.ok); - const cr2 = await db.is_users.set("id", user2, { overwrite: true }) - assert(cr2.ok) + const cr2 = await db.is_users.set("id", user2, { overwrite: true }); + assert(cr2.ok); - const doc = await db.is_users.find("id") - assert(doc !== null) - assert(doc.value.username === user2.username) - }) + const doc = await db.is_users.find("id"); + assert(doc !== null); + assert(doc.value.username === user2.username); + }); }, - ) + ); await t.step( "Should not overwrite document in collection with colliding primary index", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.set("id1", user1) - assert(cr1.ok) + const cr1 = await db.is_users.set("id1", user1); + assert(cr1.ok); - const cr2 = await db.is_users.set("id2", user1, { overwrite: true }) - assert(!cr2.ok) + const cr2 = await db.is_users.set("id2", user1, { overwrite: true }); + assert(!cr2.ok); const byPrimary = await db.is_users.findByPrimaryIndex( "username", user1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", user1.age, - ) + ); - assert(byPrimary?.id === cr1.id) - assert(bySecondary.result.length === 1) - }) + assert(byPrimary?.id === cr1.id); + assert(bySecondary.result.length === 1); + }); }, - ) + ); await t.step("Should successfully parse and set document", async () => { await useDb(async (db) => { - let assertion = true - await db.zis_users.set("id", user1).catch(() => assertion = false) - assert(assertion) - }) - }) + let assertion = true; + await db.zis_users.set("id", user1).catch(() => assertion = false); + assert(assertion); + }); + }); await t.step("Should fail to parse and set document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; await db.zis_users.set("id", mockUserInvalid).catch(() => assertion = true - ) - assert(assertion) - }) - }) -}) + ); + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/types.test.ts b/tests/serialized_indexable_collection/types.test.ts index 8c915c0..ccfb9ae 100644 --- a/tests/serialized_indexable_collection/types.test.ts +++ b/tests/serialized_indexable_collection/types.test.ts @@ -1,7 +1,7 @@ -import { collection, kvdex, model } from "../../mod.ts" -import { assert, assertEquals } from "../test.deps.ts" -import { useKv } from "../utils.ts" -import { TObject } from "../values.ts" +import { collection, kvdex, model } from "../../mod.ts"; +import { assert, assertEquals } from "../test.deps.ts"; +import { useKv } from "../utils.ts"; +import { TObject } from "../values.ts"; Deno.test("serialized_indexable_collection - types", async (t) => { await t.step( @@ -16,14 +16,14 @@ Deno.test("serialized_indexable_collection - types", async (t) => { TNumber: "secondary", }, }), - }) + }); - const cr = await db.objects.add(TObject) - assert(cr.ok) + const cr = await db.objects.add(TObject); + 
assert(cr.ok); - const doc = await db.objects.find(cr.id) - assertEquals(doc?.value, TObject) - }) + const doc = await db.objects.find(cr.id); + assertEquals(doc?.value, TObject); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/update.test.ts b/tests/serialized_indexable_collection/update.test.ts index 57e83e0..ad8ad71 100644 --- a/tests/serialized_indexable_collection/update.test.ts +++ b/tests/serialized_indexable_collection/update.test.ts @@ -1,16 +1,16 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - update", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -18,55 +18,57 @@ Deno.test("serialized_indexable_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.update(cr.id, updateData, { strategy: "merge-shallow", - }) + }); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -74,154 +76,160 @@ Deno.test("serialized_indexable_collection - update", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.update(cr.id, updateData, { strategy: "merge", - }) + }); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateCr = await db.is_users.update(cr.id, mockUser2, { strategy: "replace", - }) + }); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser2.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - assert(doc.value.age === mockUser2.age) - assert(doc.value.address.country === mockUser2.address.country) - assert(doc.value.address.city === mockUser2.address.city) - assert(doc.value.address.houseNr === mockUser2.address.houseNr) - assert(doc.value.address.street === mockUser2.address.street) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + assert(doc.value.age === mockUser2.age); + assert(doc.value.address.country === mockUser2.address.country); + assert(doc.value.address.city === mockUser2.address.city); + assert(doc.value.address.houseNr === mockUser2.address.houseNr); + assert(doc.value.address.street === mockUser2.address.street); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should not update document or delete indexed entries upon index collision", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" + const id1 = "id1"; + const id2 = "id2"; - const cr1 = await db.is_users.set(id1, mockUser1) - const cr2 = await db.is_users.set(id2, mockUser2) + const cr1 = await db.is_users.set(id1, mockUser1); + const cr2 = await db.is_users.set(id2, mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const update = await db.is_users.update(id2, { ...mockUser3, username: mockUser2.username, - }) + }); - assert(!update.ok) + assert(!update.ok); - const doc = await db.is_users.find(id2) + const doc = await db.is_users.find(id2); const docByPrimaryIndex = await db.is_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); - assert(doc?.value.username === mockUser2.username) - assert(docByPrimaryIndex?.value.username === mockUser2.username) - }) + assert(doc?.value.username === mockUser2.username); + assert(docByPrimaryIndex?.value.username === mockUser2.username); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zis_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zis_users.add(mockUser1); + assert(cr.ok); - await db.zis_users.update(cr.id, mockUser2).catch(() => assertion = false) + await db.zis_users.update(cr.id, mockUser2).catch(() => + assertion = false + ); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zis_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zis_users.add(mockUser1); + assert(cr.ok); await db.zis_users.update(cr.id, mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateByPrimaryIndex.test.ts b/tests/serialized_indexable_collection/updateByPrimaryIndex.test.ts index 3d432f6..4596ee2 100644 --- a/tests/serialized_indexable_collection/updateByPrimaryIndex.test.ts +++ b/tests/serialized_indexable_collection/updateByPrimaryIndex.test.ts @@ -1,16 +1,16 @@ -import type { Document } from "../../mod.ts" -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import type { Document } from "../../mod.ts"; +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUserInvalid } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => { await t.step( "Should update document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -18,7 +18,7 @@ Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateByPrimaryIndex( "username", @@ -27,51 +27,53 @@ Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => { 
strategy: "merge-shallow", }, - ) + ); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateData = { address: { @@ -79,7 +81,7 @@ Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateByPrimaryIndex( "username", @@ -88,51 +90,53 @@ Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => { strategy: "merge", }, - ) + ); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step( "Should update document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.is_users.add(mockUser1) - assert(cr.ok) + const cr = await db.is_users.add(mockUser1); + assert(cr.ok); const updateCr = await db.is_users.updateByPrimaryIndex( "username", @@ -141,76 +145,78 @@ Deno.test("serialized_indexable_collection - updateByPrimaryIndex", async (t) => { strategy: "replace", }, - ) + ); - const byId = await db.is_users.find(cr.id) + const byId = await db.is_users.find(cr.id); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser2.username, - ) + ); const bySecondary = await db.is_users.findBySecondaryIndex( "age", mockUser2.age, - ) - - assert(updateCr.ok) - assert(updateCr.id === cr.id) - assert(byPrimary?.id === cr.id) - assert(bySecondary.result.at(0)?.id === cr.id) - assert(updateCr.versionstamp !== cr.versionstamp) - assert(updateCr.versionstamp === byPrimary.versionstamp) - assert(updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp) + ); + + assert(updateCr.ok); + assert(updateCr.id === cr.id); + assert(byPrimary?.id === cr.id); + assert(bySecondary.result.at(0)?.id === cr.id); + assert(updateCr.versionstamp !== cr.versionstamp); + assert(updateCr.versionstamp === byPrimary.versionstamp); + assert( + updateCr.versionstamp === bySecondary.result.at(0)?.versionstamp, + ); const asserts = (doc: Document | null) => { - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - assert(doc.value.age === mockUser2.age) - assert(doc.value.address.country === mockUser2.address.country) - assert(doc.value.address.city === mockUser2.address.city) - assert(doc.value.address.houseNr === mockUser2.address.houseNr) - assert(doc.value.address.street === mockUser2.address.street) - } - - asserts(byId) - asserts(byPrimary) - asserts(bySecondary.result.at(0) ?? null) - }) + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + assert(doc.value.age === mockUser2.age); + assert(doc.value.address.country === mockUser2.address.country); + assert(doc.value.address.city === mockUser2.address.city); + assert(doc.value.address.houseNr === mockUser2.address.houseNr); + assert(doc.value.address.street === mockUser2.address.street); + }; + + asserts(byId); + asserts(byPrimary); + asserts(bySecondary.result.at(0) ?? 
null); + }); }, - ) + ); await t.step("Should successfully parse and update document", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zis_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zis_users.add(mockUser1); + assert(cr.ok); await db.zis_users.updateByPrimaryIndex( "username", mockUser1.username, mockUser2, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zis_users.add(mockUser1) - assert(cr.ok) + const cr = await db.zis_users.add(mockUser1); + assert(cr.ok); await db.zis_users.updateByPrimaryIndex( "username", mockUser1.username, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/updateBySecondaryIndex.test.ts index 025e8fd..f8c1fdd 100644 --- a/tests/serialized_indexable_collection/updateBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/updateBySecondaryIndex.test.ts @@ -1,19 +1,19 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -21,7 +21,7 @@ Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateBySecondaryIndex( "age", @@ -30,40 +30,40 @@ Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) { strategy: "merge-shallow", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEachBySecondaryIndex( "age", users[0].age, (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); }, - ) - }) + ); + }); }, - ) + ); await t.step( "Should 
update 1000 documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -71,7 +71,7 @@ Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateBySecondaryIndex( "age", @@ -80,40 +80,40 @@ Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) { strategy: "merge", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEachBySecondaryIndex( "age", users[0].age, (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); }, - ) - }) + ); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const { result } = await db.is_users.updateBySecondaryIndex( "age", @@ -122,81 +122,81 @@ Deno.test("serialized_indexable_collection - updateBySecondaryIndex", async (t) { strategy: "replace", }, - ) + ); assert( result.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( result.some((cr) => !cr.ok), - ) + ); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const { result: bySecondaryDocs } = await db.is_users .findBySecondaryIndex( "age", mockUser1.age, - ) + ); const bySecondary = bySecondaryDocs.find((doc) => doc.value.username === mockUser1.username - ) ?? 
null - - assert(byPrimary !== null) - assert(byPrimary.value.username === mockUser1.username) - assert(byPrimary.value.address.country === mockUser1.address.country) - assert(byPrimary.value.address.city === mockUser1.address.city) - assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr) - assert(byPrimary.value.address.street === mockUser1.address.street) - - assert(bySecondary !== null) - assert(bySecondary.value.username === mockUser1.username) - assert(bySecondary.value.address.country === mockUser1.address.country) - assert(bySecondary.value.address.city === mockUser1.address.city) - assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr) - assert(bySecondary.value.address.street === mockUser1.address.street) - }) + ) ?? null; + + assert(byPrimary !== null); + assert(byPrimary.value.username === mockUser1.username); + assert(byPrimary.value.address.country === mockUser1.address.country); + assert(byPrimary.value.address.city === mockUser1.address.city); + assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr); + assert(byPrimary.value.address.street === mockUser1.address.street); + + assert(bySecondary !== null); + assert(bySecondary.value.username === mockUser1.username); + assert(bySecondary.value.address.country === mockUser1.address.country); + assert(bySecondary.value.address.city === mockUser1.address.city); + assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr); + assert(bySecondary.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); await db.zis_users.updateBySecondaryIndex("age", users[0].age, mockUser1) - .catch(() => assertion = false) + .catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); await db.zis_users.updateBySecondaryIndex( "age", users[0].age, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateMany.test.ts b/tests/serialized_indexable_collection/updateMany.test.ts index 7e7a415..190b2e4 100644 --- a/tests/serialized_indexable_collection/updateMany.test.ts +++ b/tests/serialized_indexable_collection/updateMany.test.ts @@ -1,19 +1,19 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUserInvalid } from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUserInvalid } from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - updateMany", async (t) => { await t.step( "Should update 1000 documents of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await 
db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -21,40 +21,40 @@ Deno.test("serialized_indexable_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateMany(updateData, { strategy: "merge-shallow", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); + }); + }); }, - ) + ); await t.step( "Should update 1000 documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -62,111 +62,111 @@ Deno.test("serialized_indexable_collection - updateMany", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateMany(updateData, { strategy: "merge", - }) + }); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEach((doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }) - }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const users = generateUsers(1_000) - const cr = await db.is_users.addMany(users) - assert(cr.ok) + const users = generateUsers(1_000); + const cr = await db.is_users.addMany(users); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); 
+ const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const { result } = await db.is_users.updateMany(mockUser1, { strategy: "replace", - }) + }); assert( result.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( result.some((cr) => !cr.ok), - ) + ); const byPrimary = await db.is_users.findByPrimaryIndex( "username", mockUser1.username, - ) + ); const { result: [bySecondary] } = await db.is_users .findBySecondaryIndex( "age", mockUser1.age, - ) - - assert(byPrimary !== null) - assert(byPrimary.value.username === mockUser1.username) - assert(byPrimary.value.address.country === mockUser1.address.country) - assert(byPrimary.value.address.city === mockUser1.address.city) - assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr) - assert(byPrimary.value.address.street === mockUser1.address.street) - - assert(bySecondary !== null) - assert(bySecondary.value.username === mockUser1.username) - assert(bySecondary.value.address.country === mockUser1.address.country) - assert(bySecondary.value.address.city === mockUser1.address.city) - assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr) - assert(bySecondary.value.address.street === mockUser1.address.street) - }) + ); + + assert(byPrimary !== null); + assert(byPrimary.value.username === mockUser1.username); + assert(byPrimary.value.address.country === mockUser1.address.country); + assert(byPrimary.value.address.city === mockUser1.address.city); + assert(byPrimary.value.address.houseNr === mockUser1.address.houseNr); + assert(byPrimary.value.address.street === mockUser1.address.street); + + assert(bySecondary !== null); + assert(bySecondary.value.username === mockUser1.username); + assert(bySecondary.value.address.country === mockUser1.address.country); + assert(bySecondary.value.address.city === mockUser1.address.city); + assert(bySecondary.value.address.houseNr === mockUser1.address.houseNr); + assert(bySecondary.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); - await db.zis_users.updateMany(mockUser1).catch(() => assertion = false) + await db.zis_users.updateMany(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); await db.zis_users.updateMany(mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateManyBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/updateManyBySecondaryOrder.test.ts index c6c0e68..5e4cfd9 100644 --- a/tests/serialized_indexable_collection/updateManyBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/updateManyBySecondaryOrder.test.ts @@ -1,12 +1,12 @@ -import { assert, 
assertEquals } from "../test.deps.ts" +import { assert, assertEquals } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUserInvalid, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { generateUsers, useDb } from "../utils.ts" -import type { User } from "../models.ts" +} from "../mocks.ts"; +import { generateUsers, useDb } from "../utils.ts"; +import type { User } from "../models.ts"; Deno.test.ignore( "serialized_indexable_collection - updateManyBySecondaryOrder", @@ -15,12 +15,12 @@ Deno.test.ignore( "Should update documents of KvObject type using shallow merge by secondary order", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -28,7 +28,7 @@ Deno.test.ignore( city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateManyBySecondaryOrder( "age", @@ -37,44 +37,44 @@ Deno.test.ignore( limit: 2, strategy: "merge-shallow", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEachBySecondaryOrder("age", (doc) => { - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(typeof doc.value.address.street === "undefined") + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(typeof doc.value.address.street === "undefined"); }, { limit: 2, - }) + }); const last = await db.is_users.getOneBySecondaryOrder("age", { reverse: true, - }) + }); - assert(last?.value.username === mockUser2.username) - assert(last.value.address.country === mockUser2.address.country) - }) + assert(last?.value.username === mockUser2.username); + assert(last.value.address.country === mockUser2.address.country); + }); }, - ) + ); await t.step( "Should update documents of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData = { address: { @@ -82,7 +82,7 @@ Deno.test.ignore( city: "Dublin", houseNr: null, }, - } + }; const { result } = await db.is_users.updateManyBySecondaryOrder( "age", @@ -91,42 +91,42 @@ Deno.test.ignore( limit: 2, strategy: "merge", }, - ) + ); assert( result.every((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); await db.is_users.forEachBySecondaryOrder("age", (doc) => { - assert(doc.value.address.country === 
updateData.address.country) - assert(doc.value.address.city === updateData.address.city) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street !== undefined) - }, { limit: 2 }) + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.city === updateData.address.city); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street !== undefined); + }, { limit: 2 }); const last = await db.is_users.getOneBySecondaryOrder("age", { reverse: true, - }) + }); - assert(last?.value.username === mockUser2.username) - assert(last.value.address.country === mockUser2.address.country) - }) + assert(last?.value.username === mockUser2.username); + assert(last.value.address.country === mockUser2.address.country); + }); }, - ) + ); await t.step( "Should only update one document of type KvObject using replace (primary index collision)", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); - const docs = await db.is_users.getMany() - const ids = docs.result.map((doc) => doc.id) - const versionstamps = docs.result.map((doc) => doc.versionstamp) + const docs = await db.is_users.getMany(); + const ids = docs.result.map((doc) => doc.id); + const versionstamps = docs.result.map((doc) => doc.versionstamp); const updateData: User = { username: "test", @@ -136,7 +136,7 @@ Deno.test.ignore( city: "Trondheim", houseNr: 10, }, - } + }; const { result: crs } = await db.is_users.updateManyBySecondaryOrder( "age", @@ -144,75 +144,75 @@ Deno.test.ignore( { strategy: "replace", }, - ) + ); assert( crs.some((cr) => cr.ok && ids.includes(cr.id) && !versionstamps.includes(cr.versionstamp) ), - ) + ); assert( crs.some((cr) => !cr.ok), - ) + ); const { result } = await db.is_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assertEquals(result[0].username, updateData.username) - assertEquals(result[0].address.country, updateData.address.country) - assertEquals(result[0].address.city, updateData.address.city) - assertEquals(result[0].address.houseNr, updateData.address.houseNr) - assertEquals(result[0].address.street, updateData.address.street) - - assertEquals(result[1].username, mockUser1.username) - assertEquals(result[1].address.country, mockUser1.address.country) - assertEquals(result[1].address.city, mockUser1.address.city) - assertEquals(result[1].address.houseNr, mockUser1.address.houseNr) - assertEquals(result[1].address.street, mockUser1.address.street) - - assertEquals(result[2].username, mockUser2.username) - assertEquals(result[2].address.country, mockUser2.address.country) - assertEquals(result[2].address.city, mockUser2.address.city) - assertEquals(result[2].address.houseNr, mockUser2.address.houseNr) - assertEquals(result[2].address.street, mockUser2.address.street) - }) + ); + + assertEquals(result[0].username, updateData.username); + assertEquals(result[0].address.country, updateData.address.country); + assertEquals(result[0].address.city, updateData.address.city); + assertEquals(result[0].address.houseNr, updateData.address.houseNr); + assertEquals(result[0].address.street, updateData.address.street); + + assertEquals(result[1].username, mockUser1.username); + assertEquals(result[1].address.country, mockUser1.address.country); + assertEquals(result[1].address.city, mockUser1.address.city); + assertEquals(result[1].address.houseNr, 
mockUser1.address.houseNr); + assertEquals(result[1].address.street, mockUser1.address.street); + + assertEquals(result[2].username, mockUser2.username); + assertEquals(result[2].address.country, mockUser2.address.country); + assertEquals(result[2].address.city, mockUser2.address.city); + assertEquals(result[2].address.houseNr, mockUser2.address.houseNr); + assertEquals(result[2].address.street, mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = true + const users = generateUsers(10); + let assertion = true; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); await db.zis_users.updateManyBySecondaryOrder("age", mockUser1) - .catch(() => assertion = false) + .catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - const users = generateUsers(10) - let assertion = false + const users = generateUsers(10); + let assertion = false; - const cr = await db.zis_users.addMany(users) - assert(cr.ok) + const cr = await db.zis_users.addMany(users); + assert(cr.ok); await db.zis_users.updateManyBySecondaryOrder( "age", mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) + assert(assertion); + }); + }); }, -) +); diff --git a/tests/serialized_indexable_collection/updateOne.test.ts b/tests/serialized_indexable_collection/updateOne.test.ts index 4cb832c..131df89 100644 --- a/tests/serialized_indexable_collection/updateOne.test.ts +++ b/tests/serialized_indexable_collection/updateOne.test.ts @@ -1,18 +1,18 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - updateOne", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -20,43 +20,43 @@ Deno.test("serialized_indexable_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOne(updateData, { strategy: "merge-shallow", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + 
assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -64,108 +64,108 @@ Deno.test("serialized_indexable_collection - updateOne", async (t) => { city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOne(updateData, { strategy: "merge", - }) + }); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.is_users.updateOne(updateData, { strategy: "replace", - }) - - assert(updateCr.ok) - - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - 
assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + }); + + assert(updateCr.ok); + + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr1 = await db.zis_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zis_users.add(mockUser2) + const cr1 = await db.zis_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zis_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - await db.zis_users.updateOne(mockUser1).catch(() => assertion = false) + await db.zis_users.updateOne(mockUser1).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr1 = await db.zis_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zis_users.add(mockUser2) + const cr1 = await db.zis_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zis_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); await db.zis_users.updateOne(mockUserInvalid).catch(() => assertion = true - ) + ); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateOneBySecondaryIndex.test.ts b/tests/serialized_indexable_collection/updateOneBySecondaryIndex.test.ts index 1589ceb..0c5f0f0 100644 --- a/tests/serialized_indexable_collection/updateOneBySecondaryIndex.test.ts +++ b/tests/serialized_indexable_collection/updateOneBySecondaryIndex.test.ts @@ -1,18 +1,18 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3, mockUserInvalid } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - 
updateOneBySecondaryIndex", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -20,7 +20,7 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryIndex", async ( city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOneBySecondaryIndex( "age", @@ -29,39 +29,39 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryIndex", async ( { strategy: "merge-shallow", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); const updateData = { address: { @@ -69,7 +69,7 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryIndex", async ( city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOneBySecondaryIndex( "age", @@ -78,41 +78,41 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryIndex", async ( { strategy: "merge", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); - assert(doc1) - assert(doc2) + assert(doc1); + assert(doc2); - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === mockUser1.address.street) + 
assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === mockUser1.address.street); - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr1 = await db.is_users.add(mockUser1) - await sleep(10) - const cr2 = await db.is_users.add(mockUser2) + const cr1 = await db.is_users.add(mockUser1); + await sleep(10); + const cr2 = await db.is_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); - const updateData = mockUser3 + const updateData = mockUser3; const updateCr = await db.is_users.updateOneBySecondaryIndex( "age", @@ -121,72 +121,72 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryIndex", async ( { strategy: "replace", }, - ) - - assert(updateCr.ok) - - const doc1 = await db.is_users.find(cr1.id) - const doc2 = await db.is_users.find(cr2.id) - - assert(doc1) - assert(doc2) - - assert(doc1.value.username === updateData.username) - assert(doc1.value.age === updateData.age) - assert(doc1.value.address.country === updateData.address.country) - assert(doc1.value.address.city === updateData.address.city) - assert(doc1.value.address.houseNr === updateData.address.houseNr) - assert(doc1.value.address.street === undefined) - - assert(doc2.value.username === mockUser2.username) - assert(doc2.value.age === mockUser2.age) - assert(doc2.value.address.country === mockUser2.address.country) - assert(doc2.value.address.city === mockUser2.address.city) - assert(doc2.value.address.houseNr === mockUser2.address.houseNr) - assert(doc2.value.address.street === mockUser2.address.street) - }) + ); + + assert(updateCr.ok); + + const doc1 = await db.is_users.find(cr1.id); + const doc2 = await db.is_users.find(cr2.id); + + assert(doc1); + assert(doc2); + + assert(doc1.value.username === updateData.username); + assert(doc1.value.age === updateData.age); + assert(doc1.value.address.country === updateData.address.country); + assert(doc1.value.address.city === updateData.address.city); + assert(doc1.value.address.houseNr === updateData.address.houseNr); + assert(doc1.value.address.street === undefined); + + assert(doc2.value.username === mockUser2.username); + assert(doc2.value.age === mockUser2.age); + assert(doc2.value.address.country === mockUser2.address.country); + assert(doc2.value.address.city === mockUser2.address.city); + assert(doc2.value.address.houseNr === mockUser2.address.houseNr); + assert(doc2.value.address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr1 = await db.zis_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zis_users.add(mockUser2) + const cr1 = await 
db.zis_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zis_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); await db.zis_users.updateOneBySecondaryIndex( "age", mockUser2.age, mockUser1, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr1 = await db.zis_users.add(mockUser1) - await sleep(10) - const cr2 = await db.zis_users.add(mockUser2) + const cr1 = await db.zis_users.add(mockUser1); + await sleep(10); + const cr2 = await db.zis_users.add(mockUser2); - assert(cr1.ok) - assert(cr2.ok) + assert(cr1.ok); + assert(cr2.ok); await db.zis_users.updateOneBySecondaryIndex( "age", mockUser2.age, mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + }); + }); +}); diff --git a/tests/serialized_indexable_collection/updateOneBySecondaryOrder.test.ts b/tests/serialized_indexable_collection/updateOneBySecondaryOrder.test.ts index 1cf929c..0f96416 100644 --- a/tests/serialized_indexable_collection/updateOneBySecondaryOrder.test.ts +++ b/tests/serialized_indexable_collection/updateOneBySecondaryOrder.test.ts @@ -1,21 +1,21 @@ -import { assert } from "../test.deps.ts" +import { assert } from "../test.deps.ts"; import { mockUser1, mockUser2, mockUser3, mockUserInvalid, mockUsersWithAlteredAge, -} from "../mocks.ts" -import { useDb } from "../utils.ts" -import type { User } from "../models.ts" +} from "../mocks.ts"; +import { useDb } from "../utils.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async (t) => { await t.step( "Should update only one document of KvObject type using shallow merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData = { address: { @@ -23,7 +23,7 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOneBySecondaryOrder( "age", @@ -31,39 +31,39 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( { strategy: "merge-shallow", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.is_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[0].address.country === updateData.address.country) - assert(result[0].address.city === updateData.address.city) - assert(result[0].address.houseNr === updateData.address.houseNr) - assert(result[0].address.street === undefined) - - assert(result[1].address.country === mockUser1.address.country) - assert(result[1].address.city === mockUser1.address.city) - assert(result[1].address.houseNr === mockUser1.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[0].address.country === updateData.address.country); + 
assert(result[0].address.city === updateData.address.city); + assert(result[0].address.houseNr === updateData.address.houseNr); + assert(result[0].address.street === undefined); + + assert(result[1].address.country === mockUser1.address.country); + assert(result[1].address.city === mockUser1.address.city); + assert(result[1].address.houseNr === mockUser1.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using deep merge", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData = { address: { @@ -71,7 +71,7 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( city: "Dublin", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOneBySecondaryOrder( "age", @@ -80,39 +80,39 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( offset: 1, strategy: "merge", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.is_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[1].address.country === updateData.address.country) - assert(result[1].address.city === updateData.address.city) - assert(result[1].address.houseNr === updateData.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[0].address.country === mockUser3.address.country) - assert(result[0].address.city === mockUser3.address.city) - assert(result[0].address.houseNr === mockUser3.address.houseNr) - assert(result[0].address.street === mockUser3.address.street) - - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[1].address.country === updateData.address.country); + assert(result[1].address.city === updateData.address.city); + assert(result[1].address.houseNr === updateData.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + assert(result[0].address.country === mockUser3.address.country); + assert(result[0].address.city === mockUser3.address.city); + assert(result[0].address.houseNr === mockUser3.address.houseNr); + assert(result[0].address.street === mockUser3.address.street); + + assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step( "Should update only one document of KvObject type using replace", async () => { await useDb(async (db) => { - const cr = await db.is_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.is_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData: User = { username: "test", @@ -122,7 +122,7 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( 
city: "Bern", houseNr: null, }, - } + }; const updateCr = await db.is_users.updateOneBySecondaryOrder( "age", @@ -130,43 +130,43 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( { strategy: "replace", }, - ) + ); - assert(updateCr.ok) + assert(updateCr.ok); const { result } = await db.is_users.mapBySecondaryOrder( "age", (doc) => doc.value, - ) - - assert(result[0].username === updateData.username) - assert(result[0].age === updateData.age) - assert(result[0].address.country === updateData.address.country) - assert(result[0].address.city === updateData.address.city) - assert(result[0].address.houseNr === updateData.address.houseNr) - assert(result[0].address.street === undefined) - - assert(result[1].username === mockUser1.username) - assert(result[1].address.country === mockUser1.address.country) - assert(result[1].address.city === mockUser1.address.city) - assert(result[1].address.houseNr === mockUser1.address.houseNr) - assert(result[1].address.street === mockUser1.address.street) - - assert(result[2].username === mockUser2.username) - assert(result[2].address.country === mockUser2.address.country) - assert(result[2].address.city === mockUser2.address.city) - assert(result[2].address.houseNr === mockUser2.address.houseNr) - assert(result[2].address.street === mockUser2.address.street) - }) + ); + + assert(result[0].username === updateData.username); + assert(result[0].age === updateData.age); + assert(result[0].address.country === updateData.address.country); + assert(result[0].address.city === updateData.address.city); + assert(result[0].address.houseNr === updateData.address.houseNr); + assert(result[0].address.street === undefined); + + assert(result[1].username === mockUser1.username); + assert(result[1].address.country === mockUser1.address.country); + assert(result[1].address.city === mockUser1.address.city); + assert(result[1].address.houseNr === mockUser1.address.houseNr); + assert(result[1].address.street === mockUser1.address.street); + + assert(result[2].username === mockUser2.username); + assert(result[2].address.country === mockUser2.address.country); + assert(result[2].address.city === mockUser2.address.city); + assert(result[2].address.houseNr === mockUser2.address.houseNr); + assert(result[2].address.street === mockUser2.address.street); + }); }, - ) + ); await t.step("Should successfully parse and update", async () => { await useDb(async (db) => { - let assertion = true + let assertion = true; - const cr = await db.zis_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.zis_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); const updateData: User = { username: "test", @@ -176,30 +176,30 @@ Deno.test("serialized_indexable_collection - updateOneBySecondaryOrder", async ( city: "Bern", houseNr: null, }, - } + }; await db.zis_users.updateOneBySecondaryOrder( "age", updateData, - ).catch(() => assertion = false) + ).catch(() => assertion = false); - assert(assertion) - }) - }) + assert(assertion); + }); + }); await t.step("Should fail to parse and update document", async () => { await useDb(async (db) => { - let assertion = false + let assertion = false; - const cr = await db.zis_users.addMany(mockUsersWithAlteredAge) - assert(cr.ok) + const cr = await db.zis_users.addMany(mockUsersWithAlteredAge); + assert(cr.ok); await db.zis_users.updateOneBySecondaryOrder( "age", mockUserInvalid, - ).catch(() => assertion = true) + ).catch(() => assertion = true); - assert(assertion) - }) - }) -}) + assert(assertion); + 
}); + }); +}); diff --git a/tests/serialized_indexable_collection/upsert.test.ts b/tests/serialized_indexable_collection/upsert.test.ts index a7fc80d..9545688 100644 --- a/tests/serialized_indexable_collection/upsert.test.ts +++ b/tests/serialized_indexable_collection/upsert.test.ts @@ -1,35 +1,35 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - upsert", async (t) => { await t.step("Should set new doucment entry by id", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; const cr = await db.is_users.upsert({ id: id, set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser2.username); + }); + }); await t.step( "Should update existing document entry by id using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -37,7 +37,7 @@ Deno.test("serialized_indexable_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.is_users.upsert({ id: id, @@ -45,30 +45,30 @@ Deno.test("serialized_indexable_collection - upsert", async (t) => { update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by id using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -76,7 +76,7 @@ Deno.test("serialized_indexable_collection - upsert", async (t) => { city: "London", houseNr: null, }, - } + }; const cr2 = await db.is_users.upsert({ id: id, @@ -84,30 +84,30 @@ Deno.test("serialized_indexable_collection - upsert", async (t) => { update: updateData, }, { strategy: "merge", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - 
assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by id using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const cr2 = await db.is_users.upsert({ id: id, @@ -115,19 +115,19 @@ Deno.test("serialized_indexable_collection - upsert", async (t) => { update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/upsertByPrimaryIndex.test.ts b/tests/serialized_indexable_collection/upsertByPrimaryIndex.test.ts index 23584f0..688b2c2 100644 --- a/tests/serialized_indexable_collection/upsertByPrimaryIndex.test.ts +++ b/tests/serialized_indexable_collection/upsertByPrimaryIndex.test.ts @@ -1,7 +1,7 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import type { User } from "../models.ts" -import { useDb } from "../utils.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import type { User } from "../models.ts"; +import { useDb } from "../utils.ts"; Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => { await t.step("Should set new doucment entry by primary index", async () => { @@ -10,24 +10,24 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => index: ["username", mockUser1.username], set: mockUser2, update: mockUser3, - }) + }); - assert(cr.ok) + assert(cr.ok); - const doc = await db.is_users.find(cr.id) - assert(doc !== null) - assert(doc.value.username === mockUser2.username) - }) - }) + const doc = await db.is_users.find(cr.id); + assert(doc !== null); + assert(doc.value.username === 
mockUser2.username); + }); + }); await t.step( "Should update existing document entry by primary index using shallow merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -35,7 +35,7 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => city: "London", houseNr: null, }, - } + }; const cr2 = await db.is_users.upsertByPrimaryIndex({ id: id, @@ -44,30 +44,30 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => update: updateData, }, { strategy: "merge-shallow", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === undefined) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === undefined); + }); }, - ) + ); await t.step( "Should update existing document entry by primary index using deep merge", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const updateData: Partial = { address: { @@ -75,7 +75,7 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => city: "London", houseNr: null, }, - } + }; const cr2 = await db.is_users.upsertByPrimaryIndex({ id: id, @@ -84,30 +84,30 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => update: updateData, }, { strategy: "merge", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser1.username) - assert(doc.value.age === mockUser1.age) - assert(doc.value.address.city === updateData.address?.city) - assert(doc.value.address.country === updateData.address.country) - assert(doc.value.address.houseNr === updateData.address.houseNr) - assert(doc.value.address.street === mockUser1.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser1.username); + assert(doc.value.age === mockUser1.age); + assert(doc.value.address.city === updateData.address?.city); + assert(doc.value.address.country === updateData.address.country); + assert(doc.value.address.houseNr === updateData.address.houseNr); + assert(doc.value.address.street === mockUser1.address.street); + }); }, - ) + ); await t.step( "Should update existing document entry by primary index using replace", async () => { await useDb(async (db) => { - const id = "id" + const id = "id"; - const cr1 = await db.is_users.set(id, mockUser1) - assert(cr1.ok) + const cr1 = await db.is_users.set(id, mockUser1); + assert(cr1.ok); const 
cr2 = await db.is_users.upsertByPrimaryIndex({ id: id, @@ -116,19 +116,19 @@ Deno.test("serialized_indexable_collection - upsertByPrimaryIndex", async (t) => update: mockUser3, }, { strategy: "replace", - }) - - assert(cr2.ok) - - const doc = await db.is_users.find(id) - assert(doc !== null) - assert(doc.value.username === mockUser3.username) - assert(doc.value.age === mockUser3.age) - assert(doc.value.address.city === mockUser3.address?.city) - assert(doc.value.address.country === mockUser3.address.country) - assert(doc.value.address.houseNr === mockUser3.address.houseNr) - assert(doc.value.address.street === mockUser3.address.street) - }) + }); + + assert(cr2.ok); + + const doc = await db.is_users.find(id); + assert(doc !== null); + assert(doc.value.username === mockUser3.username); + assert(doc.value.age === mockUser3.age); + assert(doc.value.address.city === mockUser3.address?.city); + assert(doc.value.address.country === mockUser3.address.country); + assert(doc.value.address.houseNr === mockUser3.address.houseNr); + assert(doc.value.address.street === mockUser3.address.street); + }); }, - ) -}) + ); +}); diff --git a/tests/serialized_indexable_collection/watch.test.ts b/tests/serialized_indexable_collection/watch.test.ts index ec5e0c3..0c2782b 100644 --- a/tests/serialized_indexable_collection/watch.test.ts +++ b/tests/serialized_indexable_collection/watch.test.ts @@ -1,70 +1,70 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_indexable_collection - watch", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id = "id" - const docs: (Document | null)[] = [] + const id = "id"; + const docs: (Document | null)[] = []; const { promise, cancel } = db.is_users.watch(id, (doc) => { - docs.push(doc) - }) + docs.push(doc); + }); - await db.is_users.set(id, mockUser1) - await sleep(500) - await db.is_users.set(id, mockUser2, { overwrite: true }) - await sleep(500) - await db.is_users.update(id, mockUser3) - await sleep(500) - await db.is_users.delete(id) - await sleep(500) + await db.is_users.set(id, mockUser1); + await sleep(500); + await db.is_users.set(id, mockUser2, { overwrite: true }); + await sleep(500); + await db.is_users.update(id, mockUser3); + await sleep(500); + await db.is_users.delete(id); + await sleep(500); - assert(docs.some((doc) => doc?.value.username === mockUser1.username)) - assert(docs.some((doc) => doc?.value.username === mockUser2.username)) - assert(docs.some((doc) => doc?.value.username === mockUser3.username)) - assert(docs.some((doc) => doc === null)) + assert(docs.some((doc) => doc?.value.username === mockUser1.username)); + assert(docs.some((doc) => doc?.value.username === mockUser2.username)); + assert(docs.some((doc) => doc?.value.username === mockUser3.username)); + assert(docs.some((doc) => doc === null)); - await cancel() - await promise - }) - }) + await cancel(); + await promise; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - let count = 
0 - let username = "" - let lastDoc: any + const id1 = "id1"; + const id2 = "id2"; + let count = 0; + let username = ""; + let lastDoc: any; const { promise, cancel } = db.is_users.watch(id1, (doc) => { - count++ - lastDoc = doc + count++; + lastDoc = doc; if (doc?.value.username) { - username = doc.value.username + username = doc.value.username; } - }) + }); - await db.is_users.set(id2, mockUser1) - await sleep(500) - await db.is_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - await db.is_users.update(id2, mockUser3) - await sleep(500) - await db.is_users.delete(id2) - await sleep(500) + await db.is_users.set(id2, mockUser1); + await sleep(500); + await db.is_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + await db.is_users.update(id2, mockUser3); + await sleep(500); + await db.is_users.delete(id2); + await sleep(500); // Account for initial invocation - assert(count === 1) - assert(username === "") - assert(lastDoc === null) + assert(count === 1); + assert(username === ""); + assert(lastDoc === null); - await cancel() - await promise - }) - }) -}) + await cancel(); + await promise; + }); + }); +}); diff --git a/tests/serialized_indexable_collection/watchMany.test.ts b/tests/serialized_indexable_collection/watchMany.test.ts index b487396..9d79c1c 100644 --- a/tests/serialized_indexable_collection/watchMany.test.ts +++ b/tests/serialized_indexable_collection/watchMany.test.ts @@ -1,112 +1,112 @@ -import { assert } from "../test.deps.ts" -import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts" -import { generateUsers, sleep, useDb } from "../utils.ts" -import type { Document } from "../../mod.ts" -import type { User } from "../models.ts" +import { assert } from "../test.deps.ts"; +import { mockUser1, mockUser2, mockUser3 } from "../mocks.ts"; +import { generateUsers, sleep, useDb } from "../utils.ts"; +import type { Document } from "../../mod.ts"; +import type { User } from "../models.ts"; Deno.test("serialized_indexable_collection - watchMany", async (t) => { await t.step("Should receive all document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id2" - const id3 = "id3" - const generatedUser = generateUsers(1)[0] - const snapshots: (Document | null)[][] = [] + const id1 = "id1"; + const id2 = "id2"; + const id3 = "id3"; + const generatedUser = generateUsers(1)[0]; + const snapshots: (Document | null)[][] = []; - await db.is_users.set(id3, generatedUser) + await db.is_users.set(id3, generatedUser); - await sleep(500) + await sleep(500); const watcher = db.is_users.watchMany([id1, id2, id3], (docs) => { - snapshots.push(docs) - }) - - const cr1 = await db.is_users.set(id1, mockUser1) - await sleep(500) - await db.is_users.delete(id1) - await sleep(500) - const cr2 = await db.is_users.set(id2, mockUser2, { overwrite: true }) - await sleep(500) - const cr3 = await db.is_users.update(id3, mockUser3) - await sleep(500) - - assert(cr1.ok) - assert(cr2.ok) - assert(cr3.ok) + snapshots.push(docs); + }); + + const cr1 = await db.is_users.set(id1, mockUser1); + await sleep(500); + await db.is_users.delete(id1); + await sleep(500); + const cr2 = await db.is_users.set(id2, mockUser2, { overwrite: true }); + await sleep(500); + const cr3 = await db.is_users.update(id3, mockUser3); + await sleep(500); + + assert(cr1.ok); + assert(cr2.ok); + assert(cr3.ok); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? 
null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1?.value.username === mockUser1.username && doc2 === null && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2 === null && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === generatedUser.username - })) + doc3?.value.username === generatedUser.username; + })); assert(snapshots.some((docs) => { - const doc1 = docs.find((doc) => doc?.id === id1) ?? null - const doc2 = docs.find((doc) => doc?.id === id2) ?? null - const doc3 = docs.find((doc) => doc?.id === id3) ?? null + const doc1 = docs.find((doc) => doc?.id === id1) ?? null; + const doc2 = docs.find((doc) => doc?.id === id2) ?? null; + const doc3 = docs.find((doc) => doc?.id === id3) ?? 
null; return doc1 === null && doc2?.value.username === mockUser2.username && - doc3?.value.username === mockUser3.username - })) + doc3?.value.username === mockUser3.username; + })); - return async () => await watcher - }) - }) + return async () => await watcher; + }); + }); await t.step("Should not receive unrelated document updates", async () => { await useDb(async (db) => { - const id1 = "id1" - const id2 = "id1" - const id3 = "id1" - const id4 = "id4" - let count = 0 - let lastDocs: any[] = [] + const id1 = "id1"; + const id2 = "id1"; + const id3 = "id1"; + const id4 = "id4"; + let count = 0; + let lastDocs: any[] = []; const watcher = db.is_users.watchMany([id1, id2, id3], (docs) => { - count++ - lastDocs = docs - }) - - await db.is_users.set(id4, mockUser1) - await sleep(500) - await db.is_users.set(id4, mockUser2, { overwrite: true }) - await sleep(500) - await db.is_users.update(id4, mockUser3) - await sleep(500) - await db.is_users.delete(id4) - await sleep(500) - - assert(count === 1) - assert(lastDocs[0] === null) - assert(lastDocs[1] === null) - assert(lastDocs[2] === null) - - return async () => await watcher - }) - }) -}) + count++; + lastDocs = docs; + }); + + await db.is_users.set(id4, mockUser1); + await sleep(500); + await db.is_users.set(id4, mockUser2, { overwrite: true }); + await sleep(500); + await db.is_users.update(id4, mockUser3); + await sleep(500); + await db.is_users.delete(id4); + await sleep(500); + + assert(count === 1); + assert(lastDocs[0] === null); + assert(lastDocs[1] === null); + assert(lastDocs[2] === null); + + return async () => await watcher; + }); + }); +}); diff --git a/tests/test.deps.ts b/tests/test.deps.ts index ce3d67d..c4046b2 100644 --- a/tests/test.deps.ts +++ b/tests/test.deps.ts @@ -1,6 +1,6 @@ -export { assert } from "jsr:@std/assert@^0.217/assert" -export { assertEquals } from "jsr:@std/assert@^0.217/assert_equals" -export { assertNotEquals } from "jsr:@std/assert@^0.217/assert_not_equals" -export { assertThrows } from "jsr:@std/assert@^0.217/assert_throws" -export { z } from "npm:zod@^3.22" -export type { Kv } from "npm:@deno/kv" +export { assert } from "jsr:@std/assert@^0.217/assert"; +export { assertEquals } from "jsr:@std/assert@^0.217/assert_equals"; +export { assertNotEquals } from "jsr:@std/assert@^0.217/assert_not_equals"; +export { assertThrows } from "jsr:@std/assert@^0.217/assert_throws"; +export { z } from "npm:zod@^3.22"; +export type { Kv } from "npm:@deno/kv"; diff --git a/tests/utils.ts b/tests/utils.ts index 29ea1b2..79c9de6 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -1,7 +1,7 @@ -import { collection, type DenoKv, type DenoKvU64, kvdex } from "../mod.ts" -import { MapKv } from "../src/ext/kv/map_kv.ts" -import { model } from "../src/model.ts" -import { TransformUserModel, type User, UserSchema } from "./models.ts" +import { collection, type DenoKv, type DenoKvU64, kvdex } from "../mod.ts"; +import { MapKv } from "../src/ext/kv/map_kv.ts"; +import { model } from "../src/model.ts"; +import { TransformUserModel, type User, UserSchema } from "./models.ts"; // Create test db export function createDb(kv: DenoKv) { @@ -61,7 +61,7 @@ export function createDb(kv: DenoKv) { }, serialize: "json", }), - }) + }); } // Temporary use functions @@ -70,13 +70,13 @@ export async function useKv( ) { const kv = Deno.args[0] === "map" ? 
new MapKv() - : await Deno.openKv(":memory:") + : await Deno.openKv(":memory:"); - const result = await fn(kv) - kv.close() + const result = await fn(kv); + kv.close(); if (typeof result === "function") { - await result() + await result(); } } @@ -84,22 +84,22 @@ export async function useDb( fn: (db: ReturnType) => unknown, ) { await useKv(async (kv) => { - const db = createDb(kv) - return await fn(db) - }) + const db = createDb(kv); + return await fn(db); + }); } // Generator functions export function generateLargeUsers(n: number) { - const users: User[] = [] + const users: User[] = []; - let country = "" + let country = ""; for (let i = 0; i < 300_000; i++) { - country += "A" + country += "A"; } for (let i = 0; i < n; i++) { - const r = Math.random() + const r = Math.random(); users.push({ username: `user_${i}`, age: Math.floor(15 + i / 5), @@ -109,17 +109,17 @@ export function generateLargeUsers(n: number) { street: r < 0.5 ? "Olav Kyrres gate" : "Karl Johans gate", houseNr: Math.round(Math.random() * 100), }, - }) + }); } - return users + return users; } export function generateUsers(n: number) { - const users: User[] = [] + const users: User[] = []; for (let i = 0; i < n; i++) { - const r = Math.random() + const r = Math.random(); users.push({ username: `user_${i}`, age: Math.floor(15 + i / 5), @@ -129,14 +129,14 @@ export function generateUsers(n: number) { street: r < 0.5 ? "Olav Kyrres gate" : "Karl Johans gate", houseNr: Math.round(Math.random() * 100), }, - }) + }); } - return users + return users; } export function generateInvalidUsers(n: number) { - const users: User[] = [] + const users: User[] = []; for (let i = 0; i < n; i++) { users.push({ @@ -145,32 +145,32 @@ export function generateInvalidUsers(n: number) { address: { street: 100n, }, - } as unknown as User) + } as unknown as User); } - return users + return users; } export function generateNumbers(n: number) { - const numbers: number[] = [] + const numbers: number[] = []; for (let i = 0; i < n; i++) { - numbers.push(i) + numbers.push(i); } - return numbers + return numbers; } // Sleep functions export async function sleep(ms: number) { - await new Promise((resolve) => setTimeout(resolve, ms)) + await new Promise((resolve) => setTimeout(resolve, ms)); } export function createResolver() { - let resolve = (_?: unknown) => {} - const promise = new Promise((r) => resolve = r) + let resolve = (_?: unknown) => {}; + const promise = new Promise((r) => resolve = r); return { resolve, promise, - } + }; } diff --git a/tests/utils/isKvObject.test.ts b/tests/utils/isKvObject.test.ts index 26b1779..196d25c 100644 --- a/tests/utils/isKvObject.test.ts +++ b/tests/utils/isKvObject.test.ts @@ -1,20 +1,22 @@ -import { assert } from "../test.deps.ts" -import { TKvU64, TObject, VALUES } from "../values.ts" -import { isKvObject } from "../../src/utils.ts" +import { assert } from "../test.deps.ts"; +import { TKvU64, TObject, VALUES } from "../values.ts"; +import { isKvObject } from "../../src/utils.ts"; Deno.test("utils - isKvObject", async (t) => { await t.step("Should return true for normal objects", () => { - assert(isKvObject(TObject)) - assert(isKvObject(TKvU64)) - }) + assert(isKvObject(TObject)); + assert(isKvObject(TKvU64)); + }); await t.step( "Should return false for all non-normal objects and primtives", () => { - const objIndex = VALUES.indexOf(TObject) - const u64Index = VALUES.indexOf(TKvU64) - const filtered = VALUES.filter((_, i) => i !== objIndex && i !== u64Index) - assert(filtered.every((val) => !isKvObject(val))) + const 
objIndex = VALUES.indexOf(TObject); + const u64Index = VALUES.indexOf(TKvU64); + const filtered = VALUES.filter((_, i) => + i !== objIndex && i !== u64Index + ); + assert(filtered.every((val) => !isKvObject(val))); }, - ) -}) + ); +}); diff --git a/tests/utils/jsonDeserialize.test.ts b/tests/utils/jsonDeserialize.test.ts index 4027b25..ba75509 100644 --- a/tests/utils/jsonDeserialize.test.ts +++ b/tests/utils/jsonDeserialize.test.ts @@ -1,14 +1,14 @@ -import { jsonDeserialize, jsonSerialize } from "../../src/utils.ts" -import { assertEquals } from "../test.deps.ts" -import { VALUES } from "../values.ts" +import { jsonDeserialize, jsonSerialize } from "../../src/utils.ts"; +import { assertEquals } from "../test.deps.ts"; +import { VALUES } from "../values.ts"; Deno.test("utils - jsonDeserialize", async (t) => { await t.step( "Should successfully deserialize all KvValue type values from Uint8Array", () => { - const serialized = VALUES.map(jsonSerialize) - const deserialized = serialized.map(jsonDeserialize) - assertEquals(VALUES, deserialized) + const serialized = VALUES.map(jsonSerialize); + const deserialized = serialized.map(jsonDeserialize); + assertEquals(VALUES, deserialized); }, - ) -}) + ); +}); diff --git a/tests/utils/jsonSerialize.test.ts b/tests/utils/jsonSerialize.test.ts index 5184c7d..adf3f6e 100644 --- a/tests/utils/jsonSerialize.test.ts +++ b/tests/utils/jsonSerialize.test.ts @@ -1,13 +1,13 @@ -import { jsonSerialize } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { VALUES } from "../values.ts" +import { jsonSerialize } from "../../src/utils.ts"; +import { assert } from "../test.deps.ts"; +import { VALUES } from "../values.ts"; Deno.test("utils - jsonSerialize", async (t) => { await t.step( "Should successfully serialize all KvValue type values", () => { - const serialized = VALUES.map(jsonSerialize) - assert(serialized.every((val) => val instanceof Uint8Array)) + const serialized = VALUES.map(jsonSerialize); + assert(serialized.every((val) => val instanceof Uint8Array)); }, - ) -}) + ); +}); diff --git a/tests/utils/v8Deserialize.test.ts b/tests/utils/v8Deserialize.test.ts index 2266c3e..665f42b 100644 --- a/tests/utils/v8Deserialize.test.ts +++ b/tests/utils/v8Deserialize.test.ts @@ -1,14 +1,20 @@ -import { v8Deserialize, v8Serialize } from "../../src/utils.ts" -import { assertEquals } from "../test.deps.ts" -import { VALUES } from "../values.ts" +import { v8Deserialize, v8Serialize } from "../../src/utils.ts"; +import { assertEquals } from "../test.deps.ts"; +import { VALUES } from "../values.ts"; Deno.test("utils - v8Deserialize", async (t) => { await t.step( "Should successfully deserialize all KvValue type values from Uint8Array", () => { - const serialized = VALUES.map(v8Serialize) - const deserialized = serialized.map(v8Deserialize) - assertEquals(VALUES, deserialized) + const serialized = VALUES.map(v8Serialize); + const deserialized = serialized.map((val) => { + try { + return v8Deserialize(val); + } catch (e) { + throw new Error(`Failed to deserialize value: ${val}, Error: ${e}`); + } + }); + assertEquals(VALUES, deserialized); }, - ) -}) + ); +}); diff --git a/tests/utils/v8Serialize.test.ts b/tests/utils/v8Serialize.test.ts index a3bfb58..c063fda 100644 --- a/tests/utils/v8Serialize.test.ts +++ b/tests/utils/v8Serialize.test.ts @@ -1,13 +1,21 @@ -import { v8Serialize } from "../../src/utils.ts" -import { assert } from "../test.deps.ts" -import { VALUES } from "../values.ts" +import { v8Serialize } from "../../src/utils.ts"; +import 
{ assert } from "../test.deps.ts"; +import { VALUES } from "../values.ts"; Deno.test("utils - v8Serialize", async (t) => { await t.step( "Should successfully serialize all KvValue type values", () => { - const serialized = VALUES.map(v8Serialize) - assert(serialized.every((val) => val instanceof Uint8Array)) + const serialized = VALUES.map((val) => { + try { + return v8Serialize(val); + } catch (e) { + throw new Error( + `Failed to serialize value: ${val}, Error: ${e}`, + ); + } + }); + assert(serialized.every((val) => val instanceof Uint8Array)); }, - ) -}) + ); +}); diff --git a/tests/values.ts b/tests/values.ts index cda9cb9..6d46834 100644 --- a/tests/values.ts +++ b/tests/values.ts @@ -1,31 +1,31 @@ -import type { DenoKvU64, KvValue } from "../mod.ts" +import type { DenoKvU64, KvValue } from "../mod.ts"; -export const TUndefined = undefined -export const TNull = null -export const TNaN = NaN -export const TInfinity = Infinity -export const TNumber = 10 -export const TString = "string" -export const TBigint = 10n -export const TKvU64 = { value: 10n } satisfies DenoKvU64 -export const TBoolean = true -export const TInt8Array = new Int8Array([10, 20, 30]) -export const TInt16Array = new Int16Array([10, 20, 30]) -export const TInt32Array = new Int32Array([10, 20, 30]) -export const TBigInt64Array = new BigInt64Array([10n, 20n, 30n]) -export const TUint8Array = new Uint8Array([10, 20, 30]) -export const TUint16Array = new Uint16Array([10, 20, 30]) -export const TUint32Array = new Uint32Array([10, 20, 30]) -export const TBigUint64Array = new BigUint64Array([10n, 20n, 30n]) -export const TUint8ClampedArray = new Uint8ClampedArray([10, 20, 30]) -export const TFloat16Array = new Float16Array([10.203423878293472837429384]) -export const TFloat32Array = new Float32Array([10.203423878293472837429384]) -export const TFloat64Array = new Float64Array([10.203423878293472837429384]) -export const TBuffer = new Uint8Array([10, 20, 30]).buffer -export const TDate = new Date() -export const TRegExp = new RegExp("[0-9]") -export const TDataView = new DataView(new Uint8Array([10, 20, 30]).buffer) -export const TError = new Error("error") +export const TUndefined = undefined; +export const TNull = null; +export const TNaN = NaN; +export const TInfinity = Infinity; +export const TNumber = 10; +export const TString = "string"; +export const TBigint = 10n; +export const TKvU64 = { value: 10n } satisfies DenoKvU64; +export const TBoolean = true; +export const TInt8Array = new Int8Array([10, 20, 30]); +export const TInt16Array = new Int16Array([10, 20, 30]); +export const TInt32Array = new Int32Array([10, 20, 30]); +export const TBigInt64Array = new BigInt64Array([10n, 20n, 30n]); +export const TUint8Array = new Uint8Array([10, 20, 30]); +export const TUint16Array = new Uint16Array([10, 20, 30]); +export const TUint32Array = new Uint32Array([10, 20, 30]); +export const TBigUint64Array = new BigUint64Array([10n, 20n, 30n]); +export const TUint8ClampedArray = new Uint8ClampedArray([10, 20, 30]); +export const TFloat16Array = new Float16Array([10.203423878293472837429384]); +export const TFloat32Array = new Float32Array([10.203423878293472837429384]); +export const TFloat64Array = new Float64Array([10.203423878293472837429384]); +export const TBuffer = new Uint8Array([10, 20, 30]).buffer; +export const TDate = new Date(); +export const TRegExp = new RegExp("[0-9]"); +export const TDataView = new DataView(new Uint8Array([10, 20, 30]).buffer); +export const TError = new Error("error"); export const TArray = [ 
TUndefined, TNull, @@ -53,7 +53,7 @@ export const TArray = [ TRegExp, TDataView, TError, -] +]; export const TObject = { TUndefined, TNull, @@ -82,10 +82,10 @@ export const TObject = { TDataView, TError, TArray, -} +}; -export const TSet = new Set(TArray) -export const TMap = new Map(TArray.map((val, i) => [i, val])) +export const TSet = new Set(TArray); +export const TMap = new Map(TArray.map((val, i) => [i, val])); export const VALUES = [ TUndefined, @@ -118,4 +118,4 @@ export const VALUES = [ TObject, TSet, TMap, -] +];