From 72672f058a5f27cb5c016b364f0e8e0d6cdd67d9 Mon Sep 17 00:00:00 2001 From: chereseeriepa Date: Thu, 4 Jul 2024 14:07:24 +1200 Subject: [PATCH] get rid of seg fault --- .gitignore | 2 + README.md | 2 - package-lock.json | 200 +- package.json | 5 +- packages/indexdb/tests/init.test.ts | 2 +- packages/indexdb/tests/setup.ts | 3 +- packages/inmemory/tests/setup.ts | 3 +- packages/leveldb/tests/init.test.ts | 3 + packages/leveldb/tests/setup.ts | 9 +- packages/test-suite/src/helper/index.ts | 23 +- packages/test-suite/src/index.ts | 2419 ++++++++++++----------- 11 files changed, 1446 insertions(+), 1225 deletions(-) diff --git a/.gitignore b/.gitignore index 1f8d887c..3561a86e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ build prism-db *.tgz docs-html +db/ +crash.log \ No newline at end of file diff --git a/README.md b/README.md index c446bf4d..63dbed2b 100644 --- a/README.md +++ b/README.md @@ -90,5 +90,3 @@ npm install npm run build npm run test ``` - - diff --git a/package-lock.json b/package-lock.json index 08ed1af7..e8f3b7ba 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,10 @@ "workspaces": [ "packages/*" ], + "dependencies": { + "gdb": "^0.1.0-pre", + "segfault-handler": "^1.3.0" + }, "devDependencies": { "@bndynet/typedoc-theme": "^0.0.1", "@rollup/plugin-commonjs": "^25.0.0", @@ -1835,7 +1839,7 @@ "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, + "devOptional": true, "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -1848,7 +1852,7 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, + "devOptional": true, "engines": { "node": ">= 8" } @@ -1857,7 
+1861,7 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, + "devOptional": true, "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -2269,6 +2273,13 @@ "node": ">=12" } }, + "node_modules/@polka/url": { + "version": "1.0.0-next.25", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.25.tgz", + "integrity": "sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==", + "optional": true, + "peer": true + }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -3749,6 +3760,28 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/ui": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-1.6.0.tgz", + "integrity": "sha512-k3Lyo+ONLOgylctiGovRKy7V4+dIN2yxstX3eY5cWFXH6WP+ooVX79YSyi0GagdTQzLmT43BF27T0s6dOIPBXA==", + "optional": true, + "peer": true, + "dependencies": { + "@vitest/utils": "1.6.0", + "fast-glob": "^3.3.2", + "fflate": "^0.8.1", + "flatted": "^3.2.9", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "sirv": "^2.0.4" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "1.6.0" + } + }, "node_modules/@vitest/utils": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.6.0.tgz", @@ -4453,6 +4486,14 @@ "node": ">=16" } }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, "node_modules/bl": { "version": "4.1.0", "resolved": 
"https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", @@ -4503,7 +4544,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, + "devOptional": true, "dependencies": { "fill-range": "^7.1.1" }, @@ -4853,23 +4894,6 @@ "wrap-ansi": "^7.0.0" } }, - "node_modules/cli-highlight/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/cli-highlight/node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -4938,23 +4962,6 @@ "node": ">=12" } }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/clone": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", @@ -6787,7 +6794,7 @@ "version": "3.3.2", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "dev": true, + "devOptional": true, "dependencies": { 
"@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -6803,7 +6810,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, + "devOptional": true, "dependencies": { "is-glob": "^4.0.1" }, @@ -6827,7 +6834,7 @@ "version": "1.17.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", - "dev": true, + "devOptional": true, "dependencies": { "reusify": "^1.0.4" } @@ -6844,6 +6851,13 @@ "node": ">=0.8.0" } }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "optional": true, + "peer": true + }, "node_modules/figures": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", @@ -6900,6 +6914,11 @@ "webpack": "^4.0.0 || ^5.0.0" } }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, "node_modules/file-url": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/file-url/-/file-url-3.0.0.tgz", @@ -6913,7 +6932,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, + "devOptional": true, "dependencies": { "to-regex-range": "^5.0.1" }, @@ -7136,7 +7155,7 @@ "version": "3.3.1", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", "integrity": 
"sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", - "dev": true + "devOptional": true }, "node_modules/follow-redirects": { "version": "1.15.6", @@ -7340,6 +7359,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gdb": { + "version": "0.1.0-pre", + "resolved": "https://registry.npmjs.org/gdb/-/gdb-0.1.0-pre.tgz", + "integrity": "sha512-K336zUvoQ904aX2fn8LidUym7WvQi1y+ARmqEtCFDEAV8bJI4ZBrSiTwLI51oZi72ycOwcqgXH338yC9w1FkLA==", + "bin": { + "ngdb": "bin/ngdb" + } + }, "node_modules/generate-function": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", @@ -8213,7 +8240,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, + "devOptional": true, "engines": { "node": ">=0.10.0" } @@ -8246,7 +8273,7 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, + "devOptional": true, "dependencies": { "is-extglob": "^2.1.1" }, @@ -8323,7 +8350,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, + "devOptional": true, "engines": { "node": ">=0.12.0" } @@ -9592,7 +9619,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, + "devOptional": true, "engines": { "node": ">= 8" } @@ -9601,7 +9628,7 @@ "version": "4.0.7", "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", - "dev": true, + "devOptional": true, "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -9878,6 +9905,16 @@ "node": ">=16" } }, + "node_modules/mrmime": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", + "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", + "optional": true, + "peer": true, + "engines": { + "node": ">=10" + } + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -9894,6 +9931,11 @@ "thenify-all": "^1.0.0" } }, + "node_modules/nan": { + "version": "2.20.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.20.0.tgz", + "integrity": "sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==" + }, "node_modules/nanoid": { "version": "3.3.7", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", @@ -13494,7 +13536,7 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, + "devOptional": true, "engines": { "node": ">=8.6" }, @@ -14006,7 +14048,7 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, + "devOptional": true, "funding": [ { "type": "github", @@ -14481,7 +14523,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, + "devOptional": true, 
"engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" @@ -14953,7 +14995,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, + "devOptional": true, "funding": [ { "type": "github", @@ -15118,6 +15160,16 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/segfault-handler": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/segfault-handler/-/segfault-handler-1.3.0.tgz", + "integrity": "sha512-p7kVHo+4uoYkr0jmIiTBthwV5L2qmWtben/KDunDZ834mbos+tY+iO0//HpAJpOFSQZZ+wxKWuRo4DxV02B7Lg==", + "hasInstallScript": true, + "dependencies": { + "bindings": "^1.2.1", + "nan": "^2.14.0" + } + }, "node_modules/semantic-release": { "version": "24.0.0", "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-24.0.0.tgz", @@ -16443,6 +16495,21 @@ "node": ">=8" } }, + "node_modules/sirv": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", + "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", + "optional": true, + "peer": true, + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">= 10" + } + }, "node_modules/skin-tone": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/skin-tone/-/skin-tone-2.0.0.tgz", @@ -17251,7 +17318,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, + "devOptional": true, "dependencies": { "is-number": "^7.0.0" }, @@ -17259,6 +17326,16 @@ "node": ">=8.0" } }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "optional": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", @@ -18425,6 +18502,23 @@ "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", "dev": true }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/wrap-ansi-cjs": { "name": "wrap-ansi", "version": "7.0.0", diff --git a/package.json b/package.json index cb0f42fd..e1289767 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,10 @@ "workspaces": [ "packages/*" ], - "dependencies": {}, + "dependencies": { + "gdb": "^0.1.0-pre", + "segfault-handler": "^1.3.0" + }, "devDependencies": { "@bndynet/typedoc-theme": "^0.0.1", "@rollup/plugin-commonjs": "^25.0.0", diff --git a/packages/indexdb/tests/init.test.ts b/packages/indexdb/tests/init.test.ts index 67e79057..11f13d3e 100644 --- a/packages/indexdb/tests/init.test.ts +++ b/packages/indexdb/tests/init.test.ts @@ -12,7 +12,7 @@ describe("Testing suite", () => { runTestSuite({ describe, it, beforeEach, afterEach }, { - name: 'leveldb', + name: 'indexdb', getStorage() { return IndexDB }, diff --git a/packages/indexdb/tests/setup.ts b/packages/indexdb/tests/setup.ts index 1361d2a6..5f8f27b0 100644 --- a/packages/indexdb/tests/setup.ts +++ b/packages/indexdb/tests/setup.ts @@ -16,4 
+16,5 @@ Object.defineProperty(globalThis, "crypto", { }, }); -Object.assign(global, { TextDecoder, TextEncoder }); +const _TextUtils = { TextDecoder, TextEncoder } +Object.assign(global, _TextUtils) diff --git a/packages/inmemory/tests/setup.ts b/packages/inmemory/tests/setup.ts index 164f4a39..eaa588f6 100644 --- a/packages/inmemory/tests/setup.ts +++ b/packages/inmemory/tests/setup.ts @@ -15,4 +15,5 @@ }, }); -Object.assign(global, { TextDecoder, TextEncoder }); +const _TextUtils = { TextDecoder, TextEncoder } +Object.assign(global, _TextUtils) diff --git a/packages/leveldb/tests/init.test.ts b/packages/leveldb/tests/init.test.ts index b2635281..1fa9b086 100644 --- a/packages/leveldb/tests/init.test.ts +++ b/packages/leveldb/tests/init.test.ts @@ -4,6 +4,9 @@ import { describe, it, beforeEach, afterEach } from 'vitest'; import { runTestSuite } from '@pluto-encrypted/test-suite'; import { createLevelDBStorage } from '../src' +const SegfaultHandler = require('segfault-handler'); +SegfaultHandler.registerHandler('crash.log'); + describe("Testing suite", () => { describe("Level with dbPath", () => { runTestSuite({ diff --git a/packages/leveldb/tests/setup.ts b/packages/leveldb/tests/setup.ts index 164f4a39..30df1505 100644 --- a/packages/leveldb/tests/setup.ts +++ b/packages/leveldb/tests/setup.ts @@ -1,9 +1,15 @@ import "fake-indexeddb/auto"; import { TextEncoder, TextDecoder } from "util"; + import { addRxPlugin } from "rxdb"; import { RxDBDevModePlugin } from "rxdb/plugins/dev-mode"; import nodeCrypto from "crypto"; +// set up segfault handler +const SegfaultHandler = require('segfault-handler'); + +SegfaultHandler.registerHandler("crash.log"); // Writes a native stack trace to crash.log when a segfault occurs + if (process.env.NODE_ENV === "debug") { addRxPlugin(RxDBDevModePlugin); } @@ -15,4 +21,5 @@ Object.defineProperty(globalThis, "crypto", { }, });
-Object.assign(global, { TextDecoder, TextEncoder }); +const _TextUtils = { TextDecoder, TextEncoder } +Object.assign(global, _TextUtils) diff --git a/packages/test-suite/src/helper/index.ts b/packages/test-suite/src/helper/index.ts index e6353bfe..820c7eaa 100644 --- a/packages/test-suite/src/helper/index.ts +++ b/packages/test-suite/src/helper/index.ts @@ -8,7 +8,6 @@ import type { RxDocumentData, RxJsonSchema, RxStorage, - RxStorageInstance, } from 'rxdb' import { createRxDatabase, @@ -240,29 +239,19 @@ export function testCorrectQueries( testStorage: RxTestStorage, input: TestCorrectQueriesInput ) { - const { it, describe, beforeEach, afterEach } = suite - let storage: RxStorage - let storageInstance: RxStorageInstance | undefined + const { it, describe } = suite + // let storage: RxStorage + // let storageInstance: RxStorageInstance | undefined describe(`Testing - ${input.testTitle}`, () => { - beforeEach(async () => { - storage = await testStorage.getStorage() - }) - - afterEach(async () => { - if (storageInstance) { - await storageInstance.cleanup(Infinity) - await storageInstance.close() - } - }) - if (input.notRunIfTrue && input.notRunIfTrue()) { return } - it(input.testTitle, async ({ expect }) => { + it(input.testTitle, async () => { const schema = fillWithDefaultSettings(clone(input.schema)); const primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey); + const storage = await testStorage.getStorage() const storageInstance = await storage.createStorageInstance({ databaseInstanceToken: randomCouchString(10), databaseName: randomCouchString(12), @@ -372,6 +361,7 @@ export function testCorrectQueries( } // Test output of RxStorageInstance.query(); + // TODO: queries aren't returning anything const resultFromStorage = await storageInstance.query(preparedQuery); const resultIds = resultFromStorage.documents.map(d => (d as any)[primaryPath]); try { @@ -421,6 +411,7 @@ export function testCorrectQueries( } await Promise.all([
database.remove(), + storageInstance.cleanup(Infinity), storageInstance.close() ]); }) diff --git a/packages/test-suite/src/index.ts b/packages/test-suite/src/index.ts index ad064bb3..7fc9d8af 100644 --- a/packages/test-suite/src/index.ts +++ b/packages/test-suite/src/index.ts @@ -9,9 +9,7 @@ import type { RxDocumentData, RxDocumentWriteData, RxJsonSchema, - RxStorage, RxStorageBulkWriteResponse, - RxStorageInstance, } from 'rxdb' import { clone, @@ -70,30 +68,18 @@ import { schemaObjects } from 'rxdb/plugins/test-utils' -let storage: RxStorage -let storageInstance: RxStorageInstance | undefined - export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void { - const { describe, it, beforeEach, afterEach } = suite - describe('RxStorageInstance', () => { - beforeEach(async () => { - storage = await testStorage.getStorage() - }) - - afterEach(async () => { - if (storageInstance) { - await storageInstance.cleanup(Infinity) - storageInstance = undefined; - } - }) + const { describe, it } = suite - describe('creation', () => { + // tests out the storage module + describe('RxStorageInstance', () => { + describe('creation (single instance)', () => { it('open many instances on the same database name', async () => { const databaseName = randomCouchString(12) const amount = 20 - + for (let i = 0; i < amount; i++) { - const storageInstance = await testStorage.getStorage().createStorageInstance({ + const _storageInstance = await testStorage.getStorage().createStorageInstance({ databaseInstanceToken: randomCouchString(10), databaseName, collectionName: randomCouchString(12), @@ -103,16 +89,17 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void devMode: false, password: randomCouchString(24) }) - - await storageInstance.cleanup(Infinity) - await storageInstance.close() + + await _storageInstance.cleanup(Infinity) + await _storageInstance.close() } }) - + it('open and close', async ({ expect }) => { const collectionName = 
randomCouchString(12) const databaseName = randomCouchString(12) - storageInstance = await storage.createStorageInstance({ + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ databaseInstanceToken: randomCouchString(10), databaseName, collectionName, @@ -124,1271 +111,1396 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void }) expect(storageInstance.collectionName).toBe(collectionName) expect(storageInstance.databaseName).toBe(databaseName) - }) + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) }) + }) - describe('.bulkWrite()', () => { - it('should write the document', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const pkey = 'foobar' - const docData: RxDocumentWriteData = { - key: pkey, - value: 'barfoo1', - _deleted: false, - _meta: { - lwt: now() - }, - _rev: EXAMPLE_REVISION_1, - _attachments: {} - } - const writeResponse = await storageInstance.bulkWrite( - [{ - document: clone(docData) - }], - testContext - ) - expect(writeResponse.error).toStrictEqual([]) - const first = writeResponse.success.at(0); - expect(docData).toStrictEqual(first) + describe('.bulkWrite()', () => { + it('should write the document', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) + const pkey = 'foobar' + const docData: RxDocumentWriteData = { + key: pkey, + value: 
'barfoo1', + _deleted: false, + _meta: { + lwt: now() + }, + _rev: EXAMPLE_REVISION_1, + _attachments: {} + } + const writeResponse = await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) + expect(writeResponse.error).toStrictEqual([]) + const first = writeResponse.success.at(0); + expect(docData).toStrictEqual(first) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - it('should error on conflict', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const pkey = 'foobar' - const writeData: RxDocumentWriteData = { - key: pkey, - value: 'barfoo', - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } + it('should error on conflict', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false + }) + const pkey = 'foobar' + const writeData: RxDocumentWriteData = { + key: pkey, + value: 'barfoo', + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() } + } - await storageInstance.bulkWrite( - [{ - document: writeData - }], - testContext - ) - const writeResponse = await storageInstance.bulkWrite( - [{ - document: writeData - }], - testContext - ) - expect(writeResponse.success).toStrictEqual([]) - expect(writeResponse.error.at(0)).not.toBe(undefined) - const first = writeResponse.error.at(0)! 
- - expect(first.status).toBe(409) - expect(first.documentId).toBe(pkey) - - /** - * The conflict error state must contain the - * document state in the database. - * This ensures that we can continue resolving the conflict - * without having to pull the document out of the db first. - */ - expect((first as any).documentInDb.value).toBe(writeData.value) - - /** - * The documentInDb must not have any additional attributes. - * Some RxStorage implementations store meta fields - * together with normal document data. - * These fields must never be leaked to 409 conflict errors - */ - expect(Object.keys((first as any).documentInDb).sort()).toStrictEqual(Object.keys(writeData).sort()) - }) + await storageInstance.bulkWrite( + [{ + document: writeData + }], + testContext + ) + const writeResponse = await storageInstance.bulkWrite( + [{ + document: writeData + }], + testContext + ) + + expect(writeResponse.success).toStrictEqual([]) + expect(writeResponse.error.at(0)).not.toBe(undefined) + const first = writeResponse.error.at(0)! + + expect(first.status).toBe(409) + expect(first.documentId).toBe(pkey) - it('when inserting the same document at the same time, the first call must succeed while the second has a conflict', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const pkey = 'foobar' - const writeData: RxDocumentWriteData = { - key: pkey, - value: 'barfoo', - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } - } + /** + * The conflict error state must contain the + * document state in the database. + * This ensures that we can continue resolving the conflict + * without having to pull the document out of the db first. 
+ */ + expect((first as any).documentInDb.value).toBe(writeData.value) - const first = await storageInstance.bulkWrite( - [{ - document: Object.assign({}, writeData, { - value: 'first' - }) - }], - testContext - ) + /** + * The documentInDb must not have any additional attributes. + * Some RxStorage implementations store meta fields + * together with normal document data. + * These fields must never be leaked to 409 conflict errors + */ + expect(Object.keys((first as any).documentInDb).sort()).toStrictEqual(Object.keys(writeData).sort()) - const second = await storageInstance.bulkWrite( - [{ - document: Object.assign({}, writeData, { - value: 'second' - }) - }], - testContext - ) + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - expect(first.error).toStrictEqual([]) - expect(first.success.at(0)!.value).toBe('first') - expect(second.error.at(0)!.status).toBe(409) + it('when inserting the same document at the same time, the first call must succeed while the second has a conflict', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) + const pkey = 'foobar' + const writeData: RxDocumentWriteData = { + key: pkey, + value: 'barfoo', + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } - it('should not find the deleted document when findDocumentsById(false)', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) + const 
first = await storageInstance.bulkWrite( + [{ + document: Object.assign({}, writeData, { + value: 'first' + }) + }], + testContext + ) + + const second = await storageInstance.bulkWrite( + [{ + document: Object.assign({}, writeData, { + value: 'second' + }) + }], + testContext + ) - const pkey = 'foobar' - // make an insert - const insertData = { - key: pkey, - value: 'barfoo1', - _deleted: false, - _rev: EXAMPLE_REVISION_1, - _attachments: {}, - _meta: { - lwt: now() - } - } - const insertResponse = await storageInstance.bulkWrite( - [{ - document: insertData - }], - testContext - ) + expect(first.error).toStrictEqual([]) + expect(first.success.at(0)!.value).toBe('first') + expect(second.error.at(0)!.status).toBe(409) - expect(insertResponse.error).toStrictEqual([]) - const first = insertResponse.success.at(0)! + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - // make an update - const updateData = Object.assign({}, insertData, { - value: 'barfoo2', - _rev: EXAMPLE_REVISION_2, + it('should not find the deleted document when findDocumentsById(false)', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false + }) + + const pkey = 'foobar' + // make an insert + const insertData = { + key: pkey, + value: 'barfoo1', + _deleted: false, + _rev: EXAMPLE_REVISION_1, + _attachments: {}, + _meta: { + lwt: now() + } + } + const insertResponse = await storageInstance.bulkWrite( + [{ + document: insertData + }], + testContext + ) + + expect(insertResponse.error).toStrictEqual([]) + const first = insertResponse.success.at(0)! 
+ + // make an update + const updateData = Object.assign({}, insertData, { + value: 'barfoo2', + _rev: EXAMPLE_REVISION_2, + _meta: { + lwt: now() + } + }) + + const updateResponse = await storageInstance.bulkWrite( + [{ + previous: insertData, + document: updateData + }], + testContext + ) + + expect(updateResponse.error).toStrictEqual([]) + + // make the delete + const toDelete = { + previous: updateData, + document: Object.assign({}, first, { + value: 'barfoo_deleted', + _deleted: true, + _rev: EXAMPLE_REVISION_3, _meta: { - lwt: now() + lwt: now(), } }) + } - const updateResponse = await storageInstance.bulkWrite( - [{ - previous: insertData, - document: updateData - }], - testContext - ) - - expect(updateResponse.error).toStrictEqual([]) + const deleteResponse = await storageInstance.bulkWrite( + [toDelete], + testContext + ) - // make the delete - const toDelete = { - previous: updateData, - document: Object.assign({}, first, { - value: 'barfoo_deleted', - _deleted: true, - _rev: EXAMPLE_REVISION_3, - _meta: { - lwt: now(), - } - }) - } + expect(deleteResponse.error).toStrictEqual([]) - const deleteResponse = await storageInstance.bulkWrite( - [toDelete], - testContext - ) + const foundDoc = await storageInstance.findDocumentsById([pkey], false) - expect(deleteResponse.error).toStrictEqual([]) + expect(foundDoc).toStrictEqual([]) - const foundDoc = await storageInstance.findDocumentsById([pkey], false) + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - expect(foundDoc).toStrictEqual([]) + it('should be able to unset a property', async ({ expect }) => { + const schema = getTestDataSchema() + schema.required = ['key'] + + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: schema as any, + options: {}, + multiInstance: true, + 
devMode: false }) - - it('should be able to unset a property', async ({ expect }) => { - const schema = getTestDataSchema() - schema.required = ['key'] - - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: schema as any, - options: {}, - multiInstance: true, - devMode: false - }) - const docId = 'foobar' - const insertData: RxDocumentWriteData = { - key: docId, - value: 'barfoo1', - _attachments: {}, - _deleted: false, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } + const docId = randomString(10) + const insertData: RxDocumentWriteData = { + key: docId, + value: 'barfoo1', + _attachments: {}, + _deleted: false, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() } - const writeResponse = await storageInstance.bulkWrite( - [{ - document: insertData - }], - testContext - ) - expect(writeResponse.success.at(0)).not.toBe(undefined) - const insertResponse = writeResponse.success.at(0) - const insertDataAfterWrite: RxDocumentData = Object.assign( - {}, - insertResponse, - { - _rev: insertResponse._rev - } - ) - const updateResponse = await storageInstance.bulkWrite( - [{ - previous: insertDataAfterWrite, - document: { - key: docId, - _attachments: {}, - _deleted: false, - _rev: EXAMPLE_REVISION_2, - _meta: { - lwt: now(), - - } + } + const writeResponse = await storageInstance.bulkWrite( + [{ + document: insertData + }], + testContext + ) + expect(writeResponse.success.at(0)).not.toBe(undefined) + const insertResponse = writeResponse.success.at(0) + const insertDataAfterWrite: RxDocumentData = Object.assign( + {}, + insertResponse, + { + _rev: insertResponse!._rev + } + ) + const updateResponse = await storageInstance.bulkWrite( + [{ + previous: insertDataAfterWrite, + document: { + key: docId, + _attachments: {}, + _deleted: false, + _rev: EXAMPLE_REVISION_2, + _meta: { + lwt: now(), } - }], - testContext - ) - 
expect(updateResponse.success.at(0)).not.toBe(undefined) - - const updateResponseDoc = updateResponse.success.at(0)! - delete (updateResponseDoc)._deleted - delete (updateResponseDoc)._rev - delete (updateResponseDoc)._meta - expect(updateResponseDoc).toStrictEqual({ - key: docId, - _attachments: {} - }) - }) - - it('should be able to do a write where only _meta fields are changed', async ({ expect }) => { - const databaseInstanceToken = randomCouchString(10) - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken, - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - - const key = 'foobar' - let docData: RxDocumentData = { - key, - value: 'barfoo1', - _attachments: {}, - _deleted: false, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now(), - foobar: 0 } - } - docData._rev = createRevision(databaseInstanceToken) + }], + testContext + ) + expect(updateResponse.success.at(0)).not.toBe(undefined) - const res1 = await storageInstance.bulkWrite( - [{ - document: clone(docData) - }], - testContext - ) - expect(res1.error).toStrictEqual([]) - - // change once - let newDocData: RxDocumentData = clone(docData) - newDocData._meta.foobar = 1 - newDocData._meta.lwt = now() - newDocData._rev = createRevision(databaseInstanceToken, docData) - - const res2 = await storageInstance.bulkWrite( - [{ - previous: docData, - document: clone(newDocData) - }], - testContext - ) - expect(res2.error).toStrictEqual([]) - docData = newDocData - - // change again - newDocData = clone(docData) - newDocData._meta.foobar = 2 - newDocData._meta.lwt = now() - newDocData._rev = createRevision(databaseInstanceToken, docData) - - expect(parseRevision(newDocData._rev).height).toBe(3) - - const res3 = await storageInstance.bulkWrite( - [{ - previous: docData, - document: clone(newDocData) - }], - testContext - ) - 
expect(res3.error).toStrictEqual([]) + const updateResponseDoc = updateResponse.success.at(0)! - docData = newDocData + // TODO throwing error The operand of a 'delete' operator must be optional. + // delete updateResponseDoc._deleted + // delete (updateResponseDoc)._rev + // delete (updateResponseDoc)._meta - const viaStorage = await storageInstance.findDocumentsById([key], true) - const viaStorageDoc = ensureNotFalsy(viaStorage.at(0)) - expect(parseRevision(viaStorageDoc._rev).height).toBe(3) + expect(updateResponseDoc).toStrictEqual({ + key: docId, + _attachments: {} }) - it('should be able to create another instance after a write', async () => { - const databaseName = randomCouchString(12) - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName, - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const docData: RxDocumentWriteData = { - key: 'foobar', - value: 'barfoo1', - _attachments: {}, - _deleted: false, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } - } - await storageInstance.bulkWrite( - [{ - document: clone(docData) - }], - testContext - ) - const storageInstance2 = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName, - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - await storageInstance2.bulkWrite( - [{ - document: Object.assign( - clone(docData), - { - _rev: EXAMPLE_REVISION_2 - } - ) - }], - testContext - ) + await storageInstance.cleanup(Infinity) + }) - await Promise.all([ - storageInstance2.cleanup(Infinity).then(async () => { await storageInstance2.close() }) - ]) + it('should be able to do a write where only _meta fields are changed', async ({ expect }) => { + const databaseInstanceToken = randomCouchString(10) + + const 
_storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken, + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) - it('should be able to jump more then 1 revision height in a single write operation', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const pkey = 'foobar' - const docData: RxDocumentData = { - key: pkey, - value: 'barfoo1', - _deleted: false, - _meta: { - lwt: now() - }, - _rev: EXAMPLE_REVISION_1, - _attachments: {} + const key = 'foobar' + let docData: RxDocumentData = { + key, + value: 'barfoo1', + _attachments: {}, + _deleted: false, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now(), + foobar: 0 } - const insertResponse = await storageInstance.bulkWrite( - [{ - document: clone(docData) - }], - testContext - ) - expect(insertResponse.error).toStrictEqual([]) - - // update - const updated = flatCloneDocWithMeta(docData) - updated.value = 'barfoo2' - updated._meta.lwt = now() - updated._rev = EXAMPLE_REVISION_4 - const updateResponse = await storageInstance.bulkWrite( - [{ - previous: docData, - document: updated - }], - testContext - ) + } + docData._rev = createRevision(databaseInstanceToken) + + const res1 = await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) + expect(res1.error).toStrictEqual([]) + + // change once + let newDocData: RxDocumentData = clone(docData) + newDocData._meta.foobar = 1 + newDocData._meta.lwt = now() + newDocData._rev = createRevision(databaseInstanceToken, docData) + + const res2 = await storageInstance.bulkWrite( + [{ 
+ previous: docData, + document: clone(newDocData) + }], + testContext + ) + expect(res2.error).toStrictEqual([]) + docData = newDocData + + // change again + newDocData = clone(docData) + newDocData._meta.foobar = 2 + newDocData._meta.lwt = now() + newDocData._rev = createRevision(databaseInstanceToken, docData) + + expect(parseRevision(newDocData._rev).height).toBe(3) + + const res3 = await storageInstance.bulkWrite( + [{ + previous: docData, + document: clone(newDocData) + }], + testContext + ) + expect(res3.error).toStrictEqual([]) + + docData = newDocData + + const viaStorage = await storageInstance.findDocumentsById([key], true) + const viaStorageDoc = ensureNotFalsy(viaStorage.at(0)) + expect(parseRevision(viaStorageDoc._rev).height).toBe(3) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - expect(updateResponse.error).toStrictEqual([]) + it('should be able to create another instance after a write', async () => { + const databaseName = randomCouchString(12) + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName, + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false + }) + const docData: RxDocumentWriteData = { + key: 'foobar', + value: 'barfoo1', + _attachments: {}, + _deleted: false, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } + await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) + const storageInstance2 = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName, + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false + }) + await storageInstance2.bulkWrite( + [{ + document: Object.assign( + clone(docData), + { + _rev: 
EXAMPLE_REVISION_2 + } + ) + }], + testContext + ) - // find again - const getDocFromDb = await storageInstance.findDocumentsById([docData.key], false) + await Promise.all([ + storageInstance2.cleanup(Infinity).then(async () => { await storageInstance2.close() }) + ]) - expect(getDocFromDb.at(0)).not.toBe(undefined) - const docFromDb = getDocFromDb.at(0)! + await storageInstance.cleanup(Infinity) + await storageInstance2.cleanup(Infinity) + // await storageInstance.close() + }) - expect(docFromDb._rev).toEqual(EXAMPLE_REVISION_4) + it('should be able to jump more then 1 revision height in a single write operation', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) + const pkey = 'foobar' + const docData: RxDocumentData = { + key: pkey, + value: 'barfoo1', + _deleted: false, + _meta: { + lwt: now() + }, + _rev: EXAMPLE_REVISION_1, + _attachments: {} + } + const insertResponse = await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) + expect(insertResponse.error).toStrictEqual([]) + + // update + const updated = flatCloneDocWithMeta(docData) + updated.value = 'barfoo2' + updated._meta.lwt = now() + updated._rev = EXAMPLE_REVISION_4 + const updateResponse = await storageInstance.bulkWrite( + [{ + previous: docData, + document: updated + }], + testContext + ) + + expect(updateResponse.error).toStrictEqual([]) + + // find again + const getDocFromDb = await storageInstance.findDocumentsById([docData.key], false) + + expect(getDocFromDb.at(0)).not.toBe(undefined) + const docFromDb = getDocFromDb.at(0)! 
+ + expect(docFromDb._rev).toEqual(EXAMPLE_REVISION_4) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) - it('must be able create multiple storage instances on the same database and write documents', async () => { - const collectionsAmount = 3 - const docsAmount = 3 - const databaseName = randomCouchString(10) - const databaseInstanceToken = randomCouchString(10) - - await Promise.all( - new Array(collectionsAmount) - .fill(0) - .map(async () => { - const storageInstance = await storage.createStorageInstance({ - databaseInstanceToken, - databaseName, - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - await Promise.all( - new Array(docsAmount) - .fill(0) - .map(async (_v, docId) => { - const writeData: RxDocumentWriteData = { - key: `${docId}`, - value: randomCouchString(5), - _rev: EXAMPLE_REVISION_1, - _deleted: false, - _meta: { - lwt: now() - }, - _attachments: {} - } - await storageInstance.bulkWrite([{ document: writeData }], testContext) - }) - ) - return storageInstance + it('must be able create multiple storage instances on the same database and write documents', async () => { + const collectionsAmount = 3 + const docsAmount = 3 + const databaseName = randomCouchString(10) + const databaseInstanceToken = randomCouchString(10) + + await Promise.all( + new Array(collectionsAmount) + .fill(0) + .map(async () => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken, + databaseName, + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) - ) - }, { timeout: 200000 }) + await Promise.all( + new Array(docsAmount) + .fill(0) + .map(async (_v, docId) => { + const writeData: RxDocumentWriteData = { + key: `${docId}`, + value: randomCouchString(5), + _rev: 
EXAMPLE_REVISION_1, + _deleted: false, + _meta: { + lwt: now() + }, + _attachments: {} + } + await storageInstance.bulkWrite([{ document: writeData }], testContext) + }) + ) + return storageInstance + }) + ) + // TODO: do we need to tidy up and close these instances? + // await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }, { timeout: 200000 }) + + // Some storages had problems storing non-utf-8 chars like "é" + it('write and read with umlauts', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false + }) + const umlauts = 'äöüßé' + const pkey = 'foobar' + umlauts + // insert + const docData: RxDocumentData = { + key: pkey, + value: 'value' + umlauts, + _deleted: false, + _meta: { + lwt: now() + }, + _rev: EXAMPLE_REVISION_1, + _attachments: {} + } + const insertResponse = await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) - // Some storages had problems storing non-utf-8 chars like "é" - it('write and read with umlauts', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) - const umlauts = 'äöüßé' - const pkey = 'foobar' + umlauts - // insert - const docData: RxDocumentData = { - key: pkey, - value: 'value' + umlauts, - _deleted: false, - _meta: { - lwt: now() - }, - _rev: EXAMPLE_REVISION_1, - _attachments: {} - } - const insertResponse = await storageInstance.bulkWrite( - [{ - document: clone(docData) - }], - testContext - ) + 
expect(insertResponse.error).toStrictEqual([]) - expect(insertResponse.error).toStrictEqual([]) + // find again + const getDocFromDb = await storageInstance.findDocumentsById([docData.key], false) - // find again - const getDocFromDb = await storageInstance.findDocumentsById([docData.key], false) + expect(getDocFromDb.at(0)).not.toBe(undefined) - expect(getDocFromDb.at(0)).not.toBe(undefined) + const docFromDb = getDocFromDb.at(0) - const docFromDb = getDocFromDb.at(0) + // TODO: linter doesnt like this line + if (docFromDb) expect(docFromDb.value).toBe('value' + umlauts) - expect(docFromDb.value).toBe('value' + umlauts) + const pkey2 = 'foobar2' + umlauts + // store another doc + const docData2: RxDocumentData = { + key: pkey2, + value: 'value2' + umlauts, + _deleted: false, + _meta: { + lwt: now() + }, + _rev: EXAMPLE_REVISION_1, + _attachments: {} + } + await storageInstance.bulkWrite( + [{ + document: clone(docData2) + }], + testContext + ) + const getDocFromDb2 = await storageInstance.findDocumentsById([docData2.key], false) + + expect(getDocFromDb2.at(0)).not.toBe(undefined) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) + }) - const pkey2 = 'foobar2' + umlauts - // store another doc - const docData2: RxDocumentData = { - key: pkey2, - value: 'value2' + umlauts, - _deleted: false, - _meta: { - lwt: now() + describe('.getSortComparator()', () => { + it('should sort in the correct order', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance<{ + _id: string + age: number + }>({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: fillWithDefaultSettings({ + version: 0, + type: 'object', + primaryKey: '_id', + properties: { + _id: { + type: 'string', + maxLength: 100 + }, + age: { + type: 'number' + } }, - _rev: EXAMPLE_REVISION_1, - _attachments: {} - } - 
await storageInstance.bulkWrite( - [{ - document: clone(docData2) - }], - testContext - ) - const getDocFromDb2 = await storageInstance.findDocumentsById([docData2.key], false) - - expect(getDocFromDb2.at(0)).not.toBe(undefined) + required: [ + '_id', + 'age' + ] + }), + options: {}, + multiInstance: true, + devMode: false }) + + const query: FilledMangoQuery = { + selector: {}, + limit: 1000, + sort: [ + { age: 'asc' } + ], + skip: 0 + } + const comparator = getSortComparator( + storageInstance.schema, + query + ) + + const doc1: any = human() + doc1._id = 'aa' + doc1.age = 1 + const doc2: any = human() + doc2._id = 'bb' + doc2.age = 100 + + // should sort in the correct order + expect([doc1, doc2]).toStrictEqual([doc1, doc2].sort(comparator)) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() }) + + it('should still sort in correct order when docs do not match the selector', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getTestDataSchema(), + options: {}, + multiInstance: true, + devMode: false + }) - describe('.getSortComparator()', () => { - it('should sort in the correct order', async ({ expect }) => { - storageInstance = await storage.createStorageInstance<{ - _id: string - age: number - }>({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: fillWithDefaultSettings({ - version: 0, - type: 'object', - primaryKey: '_id', - properties: { - _id: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number' - } - }, - required: [ - '_id', - 'age' - ] - }), - options: {}, - multiInstance: true, - devMode: false - }) + const matchingValue = 'foobar' + const query: FilledMangoQuery = { + selector: { + value: { + $eq: 
matchingValue + } + }, + sort: [ + { key: 'asc' } + ], + skip: 0 + } - const query: FilledMangoQuery = { - selector: {}, - limit: 1000, - sort: [ - { age: 'asc' } - ], - skip: 0 + const comparator = getSortComparator( + storageInstance.schema, + query + ) + + const docs: TestDocType[] = [ + { + value: matchingValue, + key: 'aaa' + }, + { + value: 'barfoo', + key: 'bbb' } - const comparator = getSortComparator( - storageInstance.schema, - query - ) + ] + + const result = comparator( + docs[0]!, + docs[1]! - const doc1: any = human() - doc1._id = 'aa' - doc1.age = 1 - const doc2: any = human() - doc2._id = 'bb' - doc2.age = 100 + ) - // should sort in the correct order - expect([doc1, doc2]).toStrictEqual([doc1, doc2].sort(comparator)) + expect(result).toStrictEqual(-1) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) + + it('should work with a more complex query', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getTestDataSchema(), + options: {}, + multiInstance: true, + devMode: false }) - it('should still sort in correct order when docs do not match the selector', async ({ expect }) => { - const storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getTestDataSchema(), - options: {}, - multiInstance: true, - devMode: false - }) - const matchingValue = 'foobar' - const query: FilledMangoQuery = { - selector: { - value: { - $eq: matchingValue + const matchingValue = 'aaa' + const query: FilledMangoQuery = { + selector: { + $or: [ + { + value: matchingValue, + key: matchingValue + }, + { + value: 'barfoo', + key: 'barfoo' } - }, - sort: [ - { key: 'asc' } ], - skip: 0 + 
key: matchingValue + }, + sort: [ + { key: 'asc' } + ], + skip: 0 + } + + const comparator = getSortComparator( + storageInstance.schema, + query + ) + + const docs: TestDocType[] = [ + { + value: matchingValue, + key: matchingValue + }, + { + value: 'bbb', + key: 'bbb' } + ] - const comparator = getSortComparator( - storageInstance.schema, - query - ) + const result = comparator( + docs[0]!, + docs[1]! - const docs: TestDocType[] = [ - { - value: matchingValue, - key: 'aaa' - }, - { - value: 'barfoo', - key: 'bbb' + ) + + expect(result).toStrictEqual(-1) + + await storageInstance.cleanup(Infinity) + // await storageInstance.close() + }) + }) + + describe('.getQueryMatcher()', () => { + it('should match the right docs', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, '_id' as any), + options: {}, + multiInstance: true, + devMode: false + }); + + const query: FilledMangoQuery = { + selector: { + age: { + $gt: 10, + $ne: 50 } - ] + }, + sort: [ + { _id: 'asc' } + ], + skip: 0 + }; + const matcher = getQueryMatcher( + storageInstance.schema, + query + ); + const doc1: any = schemaObjects.humanData(); + doc1._id = 'aa'; + doc1.age = 1; + const doc2: any = schemaObjects.humanData(); + doc2._id = 'bb'; + doc2.age = 100; + + assert.strictEqual(matcher(doc1), false); + assert.strictEqual(matcher(doc2), true); + + const schema = getNestedDocSchema() + const query2: FilledMangoQuery = { + selector: { + 'nes.ted': { + $eq: 'barfoo' + } + }, + sort: [ + { id: 'asc' } + ], + skip: 0 + } - const result = comparator( - docs[0]!, - docs[1]! 
+ const queryMatcher = getQueryMatcher( + schema, + normalizeMangoQuery(schema, query2) + ) - ) + const notMatchingDoc = { + id: 'foobar', + nes: { + ted: 'xxx' + }, + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } + const matchingDoc = { + id: 'foobar', + nes: { + ted: 'barfoo' + }, + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } - expect(result).toStrictEqual(-1) + expect(queryMatcher(notMatchingDoc)).toStrictEqual(false) + expect(queryMatcher(matchingDoc)).toStrictEqual(true) + + await storageInstance.cleanup(Infinity) + // storageInstance.remove(); + // await storageInstance.close() + }) + }) + + + // TODO: tests failing because the query isnt returning any documents + describe('.query()', () => { + // TODO: tests failing because the query isnt returning any documents + it('should find all documents', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance<{ key: string, value: string }>({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion<{ key: string, value: string }>(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) - it('should work with a more complex query', async ({ expect }) => { - const storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getTestDataSchema(), - options: {}, - multiInstance: true, - devMode: false - }) - const matchingValue = 'aaa' - const query: FilledMangoQuery = { + const writeData = { + key: randomCouchString(12), + value: 'barfoo', + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } + + const writeResponse1 = await 
storageInstance.bulkWrite( + [{ + document: writeData + }], + testContext + ) + + expect(writeResponse1.error).toStrictEqual([]) + const first = writeResponse1.success.at(0); + expect(writeData).toStrictEqual(first) + + const writeData2 = { + key: randomCouchString(12), + value: 'barfoo2', + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } + + const writeResponse2 = await storageInstance.bulkWrite( + [{ + document: writeData2 + }], + testContext + ) + + expect(writeResponse2.error).toStrictEqual([]) + const second = writeResponse2.success.at(0); + expect(writeData2).toStrictEqual(second) + + // TODO: this query is not returning any documents + const preparedQuery = prepareQuery( + storageInstance.schema, + { selector: { - $or: [ - { - value: matchingValue, - key: matchingValue - }, - { - value: 'barfoo', - key: 'barfoo' - } - ], - key: matchingValue + _deleted: false }, - sort: [ - { key: 'asc' } - ], + sort: [{ key: 'asc' }], skip: 0 } - - const comparator = getSortComparator( - storageInstance.schema, - query - ) - - const docs: TestDocType[] = [ - { - value: matchingValue, - key: matchingValue - }, - { - value: 'bbb', - key: 'bbb' + ) + + const allDocs = await storageInstance.query(preparedQuery) + expect(allDocs.documents).toHaveLength(2) + + const first2 = allDocs.documents[0] + expect(first2).not.toBe(undefined) + expect(first2!.value).toBe('barfoo') + + await storageInstance.bulkWrite( + [{ + document: { + ...writeData2, + _deleted: true } - ] + }], + testContext + ) - const result = comparator( - docs[0]!, - docs[1]! 
- - ) + await storageInstance.cleanup(Infinity) + }) - expect(result).toStrictEqual(-1) + // TODO: tests failing because the query isnt returning any documents + it('should sort in the correct order', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance<{ key: string, value: string }>({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getTestDataSchema(), + options: {}, + multiInstance: true, + devMode: false }) - }) - describe('.getQueryMatcher()', () => { - it('should match the right docs', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, '_id' as any), - options: {}, - multiInstance: true, - devMode: false - }); + await storageInstance.bulkWrite([ + { + document: getWriteData({ value: 'a' }) + }, + { + document: getWriteData({ value: 'b' }) + }, + { + document: getWriteData({ value: 'c' }) + } + ], testContext) - const query: FilledMangoQuery = { - selector: { - age: { - $gt: 10, - $ne: 50 - } - }, - sort: [ - { _id: 'asc' } - ], - skip: 0 - }; - const matcher = getQueryMatcher( - storageInstance.schema, - query - ); - const doc1: any = schemaObjects.humanData(); - doc1._id = 'aa'; - doc1.age = 1; - const doc2: any = schemaObjects.humanData(); - doc2._id = 'bb'; - doc2.age = 100; - - assert.strictEqual(matcher(doc1), false); - assert.strictEqual(matcher(doc2), true); - - storageInstance.remove(); - }) - it('should match the nested document', ({ expect }) => { - const schema = getNestedDocSchema() - const query: FilledMangoQuery = { - selector: { - 'nes.ted': { - $eq: 'barfoo' - } - }, + // TODO: query not returning any documents + const preparedQuery = prepareQuery( + storageInstance.schema, + { 
+ selector: {}, sort: [ - { id: 'asc' } + { value: 'desc' } ], skip: 0 } + ) + const allDocs = await storageInstance.query(preparedQuery) - const queryMatcher = getQueryMatcher( - schema, - normalizeMangoQuery(schema, query) - ) + expect(allDocs.documents).toHaveLength(3) + expect(allDocs!.documents[0]!.value).toBe('c') + expect(allDocs!.documents[1]!.value).toBe('b') + expect(allDocs!.documents[2]!.value).toBe('a') - const notMatchingDoc = { - id: 'foobar', - nes: { - ted: 'xxx' + await storageInstance.cleanup(Infinity) + }) + + it('should have the same deterministic order of .query() and .getSortComparator()', async ({ expect }) => { + const schema: RxJsonSchema> = fillWithDefaultSettings({ + version: 0, + primaryKey: 'id', + type: 'object', + properties: { + id: { + type: 'string', + maxLength: 100 }, - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } - } - const matchingDoc = { - id: 'foobar', - nes: { - ted: 'barfoo' + equal: { + type: 'string', + maxLength: 20, + enum: ['foobar'] }, - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() + increment: { + type: 'number', + minimum: 0, + maximum: 1000, + multipleOf: 1 + }, + random: { + type: 'string', + maxLength: 100 } - } - - expect(queryMatcher(notMatchingDoc)).toStrictEqual(false) - expect(queryMatcher(matchingDoc)).toStrictEqual(true) + }, + indexes: [ + ['equal', 'id'], + ['increment', 'id'], + ['random', 'id'], + [ + 'equal', + 'increment', + 'id' + ] + ], + required: [ + 'id', + 'equal', + 'increment', + 'random' + ] }) - }) - - describe('.query()', () => { - it('should find all documents', async ({ expect }) => { - - storageInstance = await storage.createStorageInstance<{ key: string, value: string }>({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion<{ key: string, value: string }>(0, 'key'), - options: {}, - 
multiInstance: true, - devMode: false - }) - - const writeData = { - key: 'foobar', - value: 'barfoo', - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } - } - await storageInstance.bulkWrite( - [{ - document: writeData - }], - testContext - ) + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema, + options: {}, + multiInstance: true, + devMode: false + }) - const writeData2 = { - key: 'foobar2', - value: 'barfoo2', + const docsAmount = 6 + const docData: Array> = new Array(docsAmount) + .fill(0) + .map((_x, idx) => ({ + id: randomString(10), + equal: 'foobar', + random: randomString(10), + increment: idx + 1, _deleted: false, _attachments: {}, _rev: EXAMPLE_REVISION_1, _meta: { lwt: now() } - } - await storageInstance.bulkWrite( - [{ - document: writeData2 - }], - testContext - ) + })) + const writeResponse: RxStorageBulkWriteResponse = await storageInstance.bulkWrite( + docData.map(d => ({ document: d })), + testContext + ) + if (Object.keys(writeResponse.error).length > 0) { + throw new Error('could not save') + } + const docs = Object.values(writeResponse.success) + async function testQuery(query: FilledMangoQuery): Promise { const preparedQuery = prepareQuery( - storageInstance.schema, - { - selector: { - _deleted: false - }, - sort: [{ key: 'asc' }], - skip: 0 - } + storageInstance!.schema, + query ) - const allDocs = await storageInstance.query(preparedQuery) - const first = allDocs.documents[0] - - expect(first).not.toBe(undefined) - expect(first.value).toBe('barfoo') - - await storageInstance.bulkWrite( - [{ - document: { - ...writeData2, - _deleted: true - } - }], - testContext + const docsViaQuery = (await storageInstance!.query(preparedQuery)).documents + if (docsViaQuery.length !== docsAmount) { + throw new 
Error('docs missing') + } + const sortComparator = getSortComparator( + (storageInstance as any).schema, + query ) - }) - it('should sort in the correct order', async ({ expect }) => { - storageInstance = await storage.createStorageInstance<{ key: string, value: string }>({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getTestDataSchema(), - options: {}, - multiInstance: true, - devMode: false - }) + const docsViaSort = shuffleArray(docs).sort(sortComparator) + expect(docsViaQuery).toStrictEqual(docsViaSort) + } - await storageInstance.bulkWrite([ - { - document: getWriteData({ value: 'a' }) - }, - { - document: getWriteData({ value: 'b' }) - }, - { - document: getWriteData({ value: 'c' }) - } - ], testContext) + const queries: Array> = [ + { + selector: {}, + sort: [ + { id: 'asc' } + ], + skip: 0 + }, + { + selector: {}, + sort: [ + { equal: 'asc' }, + /** + * RxDB will always append the primaryKey as last sort parameter + * if the primary key is not used in the sorting before. 
+ */ + { id: 'asc' } + ], + skip: 0 + }, + { + selector: {}, + sort: [ + { increment: 'desc' }, + { id: 'asc' } + ], + skip: 0 + }, + { + selector: {}, + sort: [ + { equal: 'asc' }, + { increment: 'desc' }, + { id: 'asc' } + ], + skip: 0 + } + ] + for (const query of queries) { + await testQuery(query) + } - const preparedQuery = prepareQuery( - storageInstance.schema, - { - selector: {}, - sort: [ - { value: 'desc' } - ], - skip: 0 - } - ) - const allDocs = await storageInstance.query(preparedQuery) + await storageInstance.cleanup(Infinity) + }) - expect(allDocs.documents.length).toBe(3) - expect(allDocs.documents[0].value).toBe('c') - expect(allDocs.documents[1].value).toBe('b') - expect(allDocs.documents[2].value).toBe('a') + it('should be able to search over a nested object', async ({ expect }) => { + const schema = getNestedDocSchema() + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema, + options: {}, + multiInstance: true, + devMode: false }) - it('should have the same deterministic order of .query() and .getSortComparator()', async ({ expect }) => { - const schema: RxJsonSchema> = fillWithDefaultSettings({ - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - equal: { - type: 'string', - maxLength: 20, - enum: ['foobar'] - }, - increment: { - type: 'number', - minimum: 0, - maximum: 1000, - multipleOf: 1 + const insertResult = await storageInstance.bulkWrite([ + { + document: { + id: 'foobar', + nes: { + ted: 'barfoo' }, - random: { - type: 'string', - maxLength: 100 - } - }, - indexes: [ - ['equal', 'id'], - ['increment', 'id'], - ['random', 'id'], - [ - 'equal', - 'increment', - 'id' - ] - ], - required: [ - 'id', - 'equal', - 'increment', - 'random' - ] - }) - storageInstance = await 
storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema, - options: {}, - multiInstance: true, - devMode: false - }) - - const docsAmount = 6 - const docData: Array> = new Array(docsAmount) - .fill(0) - .map((_x, idx) => ({ - id: randomString(10), - equal: 'foobar', - random: randomString(10), - increment: idx + 1, _deleted: false, _attachments: {}, _rev: EXAMPLE_REVISION_1, _meta: { lwt: now() } - })) - const writeResponse: RxStorageBulkWriteResponse = await storageInstance.bulkWrite( - docData.map(d => ({ document: d })), - testContext - ) - if (Object.keys(writeResponse.error).length > 0) { - throw new Error('could not save') - } - const docs = Object.values(writeResponse.success) - - async function testQuery(query: FilledMangoQuery): Promise { - const preparedQuery = prepareQuery( - storageInstance!.schema, - query - ) - const docsViaQuery = (await storageInstance!.query(preparedQuery)).documents - if (docsViaQuery.length !== docsAmount) { - throw new Error('docs missing') } - const sortComparator = getSortComparator( - (storageInstance as any).schema, - query - ) - const docsViaSort = shuffleArray(docs).sort(sortComparator) - expect(docsViaQuery).toStrictEqual(docsViaSort) } - const queries: Array> = [ - { - selector: {}, - sort: [ - { id: 'asc' } - ], - skip: 0 - }, - { - selector: {}, - sort: [ - { equal: 'asc' }, - /** - * RxDB will always append the primaryKey as last sort parameter - * if the primary key is not used in the sorting before. 
- */ - { id: 'asc' } - ], - skip: 0 - }, - { - selector: {}, - sort: [ - { increment: 'desc' }, - { id: 'asc' } - ], - skip: 0 + ], testContext) + + expect(insertResult.error).toStrictEqual([]) + + const preparedQuery = prepareQuery( + schema, + { + selector: { + 'nes.ted': { + $eq: 'barfoo' + } }, - { - selector: {}, - sort: [ - { equal: 'asc' }, - { increment: 'desc' }, - { id: 'asc' } - ], - skip: 0 - } - ] - for (const query of queries) { - await testQuery(query) + sort: [ + { 'nes.ted': 'asc' }, + { id: 'asc' } + ], + skip: 0 } - }) - it('should be able to search over a nested object', async ({ expect }) => { - const schema = getNestedDocSchema() - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema, - options: {}, - multiInstance: true, - devMode: false - }) - const insertResult = await storageInstance.bulkWrite([ - { - document: { - id: 'foobar', - nes: { - ted: 'barfoo' - }, - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } - } - } - ], testContext) + ) - expect(insertResult.error).toStrictEqual([]) - - const preparedQuery = prepareQuery( - schema, - { - selector: { - 'nes.ted': { - $eq: 'barfoo' - } - }, - sort: [ - { 'nes.ted': 'asc' }, - { id: 'asc' } - ], - skip: 0 - } - ) + const results = await storageInstance.query(preparedQuery) - const results = await storageInstance.query(preparedQuery) + expect(results.documents.length).toBe(1) - expect(results.documents.length).toBe(1) + await storageInstance.cleanup(Infinity) + }) + it('querying many documents should work', async ({ expect }) => { + const schema = getTestDataSchema() + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + 
schema, + options: {}, + multiInstance: true, + devMode: false }) - it('querying many documents should work', async ({ expect }) => { - const schema = getTestDataSchema() - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema, - options: {}, - multiInstance: true, - devMode: false - }) - - const amount = 100 - - await storageInstance.bulkWrite( - new Array(amount) - .fill(0) - .map((_v, idx) => ({ - document: getWriteData({ - key: idx.toString().padStart(5, '0') + '-' + randomString(10), - value: idx + '' - }) - })), - testContext - ) - const preparedQuery = prepareQuery( - schema, - { - selector: {}, - skip: 0, - sort: [ - { key: 'asc' } - ] - } - ) - const results = await storageInstance.query(preparedQuery) + const amount = 100 - expect(results.documents.length).toBe(amount) - }) - }) + await storageInstance.bulkWrite( + new Array(amount) + .fill(0) + .map((_v, idx) => ({ + document: getWriteData({ + key: idx.toString().padStart(5, '0') + '-' + randomString(10), + value: idx + '' + }) + })), + testContext + ) - describe('.count()', () => { - it('should count the correct amount', async ({ expect }) => { - const schema = getTestDataSchema() - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema, - options: {}, - multiInstance: true, - devMode: false - }) - const preparedQueryAll = prepareQuery( - schema, - { - selector: {}, - sort: [ - { key: 'asc' } - ], - skip: 0 - } - ) - async function ensureCountIs(nr: number): Promise { - const result = await storageInstance!.count(preparedQueryAll) - expect(result.count).toBe(nr) + const preparedQuery = prepareQuery( + schema, + { + selector: {}, + skip: 0, + sort: [ + { key: 'asc' } + ] } - await ensureCountIs(0) + ) + const results = await 
storageInstance.query(preparedQuery) - await storageInstance.bulkWrite([{ document: getWriteData() }], testContext) - await ensureCountIs(1) + expect(results.documents.length).toBe(amount) - const writeData = getWriteData() - await storageInstance.bulkWrite([{ document: writeData }], testContext) - await ensureCountIs(2) - }) + await storageInstance.cleanup(Infinity) }) - describe('.findDocumentsById()', () => { - it('should find the documents', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getPseudoSchemaForVersion(0, 'key'), - options: {}, - multiInstance: true, - devMode: false - }) + }) - const pkey = 'foobar' - const docData = { - key: pkey, - value: 'barfoo', - _deleted: false, - _attachments: {}, - _rev: EXAMPLE_REVISION_1, - _meta: { - lwt: now() - } + // TODO: tests failing as the query isnt returning any documents + describe('.count()', () => { + it('should count the correct amount', async ({ expect }) => { + const schema = getTestDataSchema() + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema, + options: {}, + multiInstance: true, + devMode: false + }) + + const preparedQueryAll = prepareQuery( + storageInstance.schema, + { + selector: {}, + sort: [ + { key: 'asc' } + ], + skip: 0 } - await storageInstance.bulkWrite( - [{ - document: docData - }], - testContext - ) + ) + async function ensureCountIs(nr: number): Promise { + // TODO: this query is not returning any documents + const result = await storageInstance.count(preparedQueryAll) + expect(result.count).toBe(nr) + } + + async function writeDoc () { + const docData = getWriteData() + const writeResponse = await 
storageInstance.bulkWrite([{ document: clone(docData) }], testContext) + console.log(writeResponse) + + expect(writeResponse.error).toStrictEqual([]) + const first = writeResponse.success.at(0); + expect(docData).toStrictEqual(first) + } - const found = await storageInstance.findDocumentsById(['foobar'], false) - const foundDoc = found.at(0) + await ensureCountIs(0) + await writeDoc() + await ensureCountIs(1) + await writeDoc() + await ensureCountIs(2) - expect(foundDoc).toStrictEqual(docData) + await storageInstance.cleanup(Infinity) + }) + }) + + + describe('.findDocumentsById()', () => { + it('should find the documents', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getPseudoSchemaForVersion(0, 'key'), + options: {}, + multiInstance: true, + devMode: false }) - /** - * Some storage implementations ran into some limits - * like SQLite SQLITE_MAX_VARIABLE_NUMBER etc. - * Writing many documents must just work and the storage itself - * has to workaround any problems with that. 
- */ - it('should be able to insert and fetch many documents', async ({ expect }) => { - storageInstance = await storage.createStorageInstance({ - databaseInstanceToken: randomCouchString(10), - databaseName: randomCouchString(12), - collectionName: randomCouchString(12), - schema: getTestDataSchema(), - options: {}, - multiInstance: true, - devMode: false - }) + const pkey = randomCouchString(12) + const docData = { + key: pkey, + value: 'barfoo', + _deleted: false, + _attachments: {}, + _rev: EXAMPLE_REVISION_1, + _meta: { + lwt: now() + } + } + + const writeResponse = await storageInstance.bulkWrite( + [{ + document: clone(docData) + }], + testContext + ) - const amount = 5000 - const writeRows = new Array(amount) - .fill(0) - .map(() => ({ document: getWriteData() })) + expect(writeResponse.error).toStrictEqual([]) + const first = writeResponse.success.at(0); + expect(docData).toStrictEqual(first) + + const found = await storageInstance.findDocumentsById([pkey], false) + expect(found).toHaveLength(1) - // insert - const writeResult = await storageInstance.bulkWrite(writeRows, 'insert-many-' + amount) - expect(writeResult.error).toStrictEqual([]) + const foundDoc = found.at(0) + expect(foundDoc).toStrictEqual(docData) - // fetch again - const fetchResult = await storageInstance.findDocumentsById(writeRows.map(r => r.document.key), false) - expect(Object.keys(fetchResult).length).toStrictEqual(amount) - }, { timeout: 50000 }) + await storageInstance.cleanup(Infinity) }) + + /** + * Some storage implementations ran into some limits + * like SQLite SQLITE_MAX_VARIABLE_NUMBER etc. + * Writing many documents must just work and the storage itself + * has to workaround any problems with that. 
+ */ + it('should be able to insert and fetch many documents', async ({ expect }) => { + const _storage = await testStorage.getStorage() + const storageInstance = await _storage.createStorageInstance({ + databaseInstanceToken: randomCouchString(10), + databaseName: randomCouchString(12), + collectionName: randomCouchString(12), + schema: getTestDataSchema(), + options: {}, + multiInstance: true, + devMode: false + }) + + const amount = 5000 + const writeRows = new Array(amount) + .fill(0) + .map(() => ({ document: getWriteData() })) + + // insert + const writeResult = await storageInstance.bulkWrite(writeRows, 'insert-many-' + amount) + expect(writeResult.error).toStrictEqual([]) + + // fetch again + const fetchResult = await storageInstance.findDocumentsById(writeRows.map(r => r.document.key), false) + expect(Object.keys(fetchResult).length).toStrictEqual(amount) + }, { timeout: 50000 }) }) describe('RxStorageInstance Queries', () => { @@ -1528,6 +1640,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: '$lt/$lte', data: [ @@ -1553,6 +1666,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: '$lt/$lte', data: [ @@ -1656,6 +1770,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: 'nested properties', data: [ @@ -1701,6 +1816,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: '$or', data: [ @@ -1791,6 +1907,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: '$in', data: [ @@ -1856,6 +1973,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + 
testCorrectQueries(suite, testStorage, { testTitle: '$elemMatch/$size', data: [ @@ -2224,6 +2342,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + /** * @link https://github.com/pubkey/rxdb/issues/5273 */ @@ -2325,6 +2444,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries(suite, testStorage, { testTitle: '$type', data: [ @@ -2390,6 +2510,7 @@ export function runTestSuite(suite: TestSuite, testStorage: RxTestStorage): void } ] }) + testCorrectQueries<{ _id: string name: string